Index: ql/src/test/results/clientpositive/groupby_ppr.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby_ppr.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/groupby_ppr.q.out (working copy)
@@ -69,10 +69,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -84,10 +89,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
@@ -106,10 +116,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -121,10 +136,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
Index: ql/src/test/results/clientpositive/stats8.q.out
===================================================================
--- ql/src/test/results/clientpositive/stats8.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/stats8.q.out (working copy)
@@ -77,9 +77,9 @@
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: describe extended analyze_srcpart PARTITION(ds='2008-04-08',hr=11)
+PREHOOK: query: describe formatted analyze_srcpart PARTITION(ds='2008-04-08',hr=11)
PREHOOK: type: DESCTABLE
-POSTHOOK: query: describe extended analyze_srcpart PARTITION(ds='2008-04-08',hr=11)
+POSTHOOK: query: describe formatted analyze_srcpart PARTITION(ds='2008-04-08',hr=11)
POSTHOOK: type: DESCTABLE
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
@@ -89,15 +89,44 @@
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-key string default
-value string default
-ds string
-hr string
+# col_name data_type comment
+key string default
+value string default
+
+# Partition Information
+# col_name data_type comment
+
+ds string None
+hr string None
+
+# Detailed Partition Information
+Partition Value: [2008-04-08, 11]
+Database: default
+Table: analyze_srcpart
#### A masked pattern was here ####
-PREHOOK: query: describe extended analyze_srcpart
+Protect Mode: None
+#### A masked pattern was here ####
+Partition Parameters:
+ numFiles 1
+ numRows 500
+ rawDataSize 5312
+ totalSize 5812
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+Compressed: No
+Num Buckets: -1
+Bucket Columns: []
+Sort Columns: []
+Storage Desc Params:
+ serialization.format 1
+PREHOOK: query: describe formatted analyze_srcpart
PREHOOK: type: DESCTABLE
-POSTHOOK: query: describe extended analyze_srcpart
+POSTHOOK: query: describe formatted analyze_srcpart
POSTHOOK: type: DESCTABLE
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
@@ -107,12 +136,42 @@
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-key string default
-value string default
-ds string
-hr string
+# col_name data_type comment
+key string default
+value string default
+
+# Partition Information
+# col_name data_type comment
+
+ds string None
+hr string None
+
+# Detailed Table Information
+Database: default
#### A masked pattern was here ####
+Protect Mode: None
+Retention: 0
+#### A masked pattern was here ####
+Table Type: MANAGED_TABLE
+Table Parameters:
+ numFiles 1
+ numPartitions 1
+ numRows 500
+ rawDataSize 5312
+ totalSize 5812
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+Compressed: No
+Num Buckets: -1
+Bucket Columns: []
+Sort Columns: []
+Storage Desc Params:
+ serialization.format 1
PREHOOK: query: explain analyze table analyze_srcpart PARTITION(ds='2008-04-08',hr=12) compute statistics
PREHOOK: type: QUERY
POSTHOOK: query: explain analyze table analyze_srcpart PARTITION(ds='2008-04-08',hr=12) compute statistics
@@ -162,9 +221,9 @@
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: describe extended analyze_srcpart PARTITION(ds='2008-04-08',hr=12)
+PREHOOK: query: describe formatted analyze_srcpart PARTITION(ds='2008-04-08',hr=12)
PREHOOK: type: DESCTABLE
-POSTHOOK: query: describe extended analyze_srcpart PARTITION(ds='2008-04-08',hr=12)
+POSTHOOK: query: describe formatted analyze_srcpart PARTITION(ds='2008-04-08',hr=12)
POSTHOOK: type: DESCTABLE
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
@@ -174,12 +233,41 @@
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-key string default
-value string default
-ds string
-hr string
+# col_name data_type comment
+key string default
+value string default
+
+# Partition Information
+# col_name data_type comment
+
+ds string None
+hr string None
+
+# Detailed Partition Information
+Partition Value: [2008-04-08, 12]
+Database: default
+Table: analyze_srcpart
#### A masked pattern was here ####
+Protect Mode: None
+#### A masked pattern was here ####
+Partition Parameters:
+ numFiles 1
+ numRows 500
+ rawDataSize 5312
+ totalSize 5812
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+Compressed: No
+Num Buckets: -1
+Bucket Columns: []
+Sort Columns: []
+Storage Desc Params:
+ serialization.format 1
PREHOOK: query: explain analyze table analyze_srcpart PARTITION(ds='2008-04-09',hr=11) compute statistics
PREHOOK: type: QUERY
POSTHOOK: query: explain analyze table analyze_srcpart PARTITION(ds='2008-04-09',hr=11) compute statistics
@@ -229,9 +317,9 @@
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: describe extended analyze_srcpart PARTITION(ds='2008-04-09',hr=11)
+PREHOOK: query: describe formatted analyze_srcpart PARTITION(ds='2008-04-09',hr=11)
PREHOOK: type: DESCTABLE
-POSTHOOK: query: describe extended analyze_srcpart PARTITION(ds='2008-04-09',hr=11)
+POSTHOOK: query: describe formatted analyze_srcpart PARTITION(ds='2008-04-09',hr=11)
POSTHOOK: type: DESCTABLE
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
@@ -241,12 +329,41 @@
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-key string default
-value string default
-ds string
-hr string
+# col_name data_type comment
+key string default
+value string default
+
+# Partition Information
+# col_name data_type comment
+
+ds string None
+hr string None
+
+# Detailed Partition Information
+Partition Value: [2008-04-09, 11]
+Database: default
+Table: analyze_srcpart
#### A masked pattern was here ####
+Protect Mode: None
+#### A masked pattern was here ####
+Partition Parameters:
+ numFiles 1
+ numRows 500
+ rawDataSize 5312
+ totalSize 5812
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+Compressed: No
+Num Buckets: -1
+Bucket Columns: []
+Sort Columns: []
+Storage Desc Params:
+ serialization.format 1
PREHOOK: query: explain analyze table analyze_srcpart PARTITION(ds='2008-04-09',hr=12) compute statistics
PREHOOK: type: QUERY
POSTHOOK: query: explain analyze table analyze_srcpart PARTITION(ds='2008-04-09',hr=12) compute statistics
@@ -296,9 +413,9 @@
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: describe extended analyze_srcpart PARTITION(ds='2008-04-09',hr=12)
+PREHOOK: query: describe formatted analyze_srcpart PARTITION(ds='2008-04-09',hr=12)
PREHOOK: type: DESCTABLE
-POSTHOOK: query: describe extended analyze_srcpart PARTITION(ds='2008-04-09',hr=12)
+POSTHOOK: query: describe formatted analyze_srcpart PARTITION(ds='2008-04-09',hr=12)
POSTHOOK: type: DESCTABLE
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
@@ -308,12 +425,41 @@
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-key string default
-value string default
-ds string
-hr string
+# col_name data_type comment
+key string default
+value string default
+
+# Partition Information
+# col_name data_type comment
+
+ds string None
+hr string None
+
+# Detailed Partition Information
+Partition Value: [2008-04-09, 12]
+Database: default
+Table: analyze_srcpart
#### A masked pattern was here ####
+Protect Mode: None
+#### A masked pattern was here ####
+Partition Parameters:
+ numFiles 1
+ numRows 500
+ rawDataSize 5312
+ totalSize 5812
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+Compressed: No
+Num Buckets: -1
+Bucket Columns: []
+Sort Columns: []
+Storage Desc Params:
+ serialization.format 1
PREHOOK: query: explain analyze table analyze_srcpart PARTITION(ds, hr) compute statistics
PREHOOK: type: QUERY
POSTHOOK: query: explain analyze table analyze_srcpart PARTITION(ds, hr) compute statistics
@@ -375,9 +521,9 @@
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: describe extended analyze_srcpart PARTITION(ds='2008-04-08',hr=11)
+PREHOOK: query: describe formatted analyze_srcpart PARTITION(ds='2008-04-08',hr=11)
PREHOOK: type: DESCTABLE
-POSTHOOK: query: describe extended analyze_srcpart PARTITION(ds='2008-04-08',hr=11)
+POSTHOOK: query: describe formatted analyze_srcpart PARTITION(ds='2008-04-08',hr=11)
POSTHOOK: type: DESCTABLE
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
@@ -387,15 +533,44 @@
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-key string default
-value string default
-ds string
-hr string
+# col_name data_type comment
+key string default
+value string default
+
+# Partition Information
+# col_name data_type comment
+
+ds string None
+hr string None
+
+# Detailed Partition Information
+Partition Value: [2008-04-08, 11]
+Database: default
+Table: analyze_srcpart
#### A masked pattern was here ####
-PREHOOK: query: describe extended analyze_srcpart PARTITION(ds='2008-04-08',hr=12)
+Protect Mode: None
+#### A masked pattern was here ####
+Partition Parameters:
+ numFiles 1
+ numRows 0
+ rawDataSize 0
+ totalSize 5812
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+Compressed: No
+Num Buckets: -1
+Bucket Columns: []
+Sort Columns: []
+Storage Desc Params:
+ serialization.format 1
+PREHOOK: query: describe formatted analyze_srcpart PARTITION(ds='2008-04-08',hr=12)
PREHOOK: type: DESCTABLE
-POSTHOOK: query: describe extended analyze_srcpart PARTITION(ds='2008-04-08',hr=12)
+POSTHOOK: query: describe formatted analyze_srcpart PARTITION(ds='2008-04-08',hr=12)
POSTHOOK: type: DESCTABLE
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
@@ -405,15 +580,44 @@
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-key string default
-value string default
-ds string
-hr string
+# col_name data_type comment
+key string default
+value string default
+
+# Partition Information
+# col_name data_type comment
+
+ds string None
+hr string None
+
+# Detailed Partition Information
+Partition Value: [2008-04-08, 12]
+Database: default
+Table: analyze_srcpart
#### A masked pattern was here ####
-PREHOOK: query: describe extended analyze_srcpart PARTITION(ds='2008-04-09',hr=11)
+Protect Mode: None
+#### A masked pattern was here ####
+Partition Parameters:
+ numFiles 1
+ numRows 0
+ rawDataSize 0
+ totalSize 5812
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+Compressed: No
+Num Buckets: -1
+Bucket Columns: []
+Sort Columns: []
+Storage Desc Params:
+ serialization.format 1
+PREHOOK: query: describe formatted analyze_srcpart PARTITION(ds='2008-04-09',hr=11)
PREHOOK: type: DESCTABLE
-POSTHOOK: query: describe extended analyze_srcpart PARTITION(ds='2008-04-09',hr=11)
+POSTHOOK: query: describe formatted analyze_srcpart PARTITION(ds='2008-04-09',hr=11)
POSTHOOK: type: DESCTABLE
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
@@ -423,15 +627,44 @@
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-key string default
-value string default
-ds string
-hr string
+# col_name data_type comment
+key string default
+value string default
+
+# Partition Information
+# col_name data_type comment
+
+ds string None
+hr string None
+
+# Detailed Partition Information
+Partition Value: [2008-04-09, 11]
+Database: default
+Table: analyze_srcpart
#### A masked pattern was here ####
-PREHOOK: query: describe extended analyze_srcpart PARTITION(ds='2008-04-09',hr=12)
+Protect Mode: None
+#### A masked pattern was here ####
+Partition Parameters:
+ numFiles 1
+ numRows 2000
+ rawDataSize 21248
+ totalSize 5812
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+Compressed: No
+Num Buckets: -1
+Bucket Columns: []
+Sort Columns: []
+Storage Desc Params:
+ serialization.format 1
+PREHOOK: query: describe formatted analyze_srcpart PARTITION(ds='2008-04-09',hr=12)
PREHOOK: type: DESCTABLE
-POSTHOOK: query: describe extended analyze_srcpart PARTITION(ds='2008-04-09',hr=12)
+POSTHOOK: query: describe formatted analyze_srcpart PARTITION(ds='2008-04-09',hr=12)
POSTHOOK: type: DESCTABLE
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
@@ -441,15 +674,44 @@
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-key string default
-value string default
-ds string
-hr string
+# col_name data_type comment
+key string default
+value string default
+
+# Partition Information
+# col_name data_type comment
+
+ds string None
+hr string None
+
+# Detailed Partition Information
+Partition Value: [2008-04-09, 12]
+Database: default
+Table: analyze_srcpart
#### A masked pattern was here ####
-PREHOOK: query: describe extended analyze_srcpart
+Protect Mode: None
+#### A masked pattern was here ####
+Partition Parameters:
+ numFiles 1
+ numRows 0
+ rawDataSize 0
+ totalSize 5812
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+Compressed: No
+Num Buckets: -1
+Bucket Columns: []
+Sort Columns: []
+Storage Desc Params:
+ serialization.format 1
+PREHOOK: query: describe formatted analyze_srcpart
PREHOOK: type: DESCTABLE
-POSTHOOK: query: describe extended analyze_srcpart
+POSTHOOK: query: describe formatted analyze_srcpart
POSTHOOK: type: DESCTABLE
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
@@ -459,9 +721,39 @@
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-key string default
-value string default
-ds string
-hr string
+# col_name data_type comment
+key string default
+value string default
+
+# Partition Information
+# col_name data_type comment
+
+ds string None
+hr string None
+
+# Detailed Table Information
+Database: default
#### A masked pattern was here ####
+Protect Mode: None
+Retention: 0
+#### A masked pattern was here ####
+Table Type: MANAGED_TABLE
+Table Parameters:
+ numFiles 4
+ numPartitions 4
+ numRows 2000
+ rawDataSize 21248
+ totalSize 23248
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+Compressed: No
+Num Buckets: -1
+Bucket Columns: []
+Sort Columns: []
+Storage Desc Params:
+ serialization.format 1
Index: ql/src/test/results/clientpositive/input_part7.q.out
===================================================================
--- ql/src/test/results/clientpositive/input_part7.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/input_part7.q.out (working copy)
@@ -149,10 +149,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -164,10 +169,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
@@ -186,10 +196,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -201,10 +216,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
Index: ql/src/test/results/clientpositive/bucketmapjoin5.q.out
===================================================================
--- ql/src/test/results/clientpositive/bucketmapjoin5.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/bucketmapjoin5.q.out (working copy)
@@ -252,10 +252,15 @@
columns.types int:string
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part
+ numFiles 4
+ numPartitions 2
+ numRows 0
partition_columns ds
+ rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -268,10 +273,15 @@
columns.types int:string
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part
+ numFiles 8
+ numPartitions 2
+ numRows 0
partition_columns ds
+ rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 11624
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcbucket_mapjoin_part
@@ -290,10 +300,15 @@
columns.types int:string
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part
+ numFiles 4
+ numPartitions 2
+ numRows 0
partition_columns ds
+ rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -306,10 +321,15 @@
columns.types int:string
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part
+ numFiles 8
+ numPartitions 2
+ numRows 0
partition_columns ds
+ rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 11624
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcbucket_mapjoin_part
@@ -781,10 +801,15 @@
columns.types int:string
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part_2
+ numFiles 2
+ numPartitions 2
+ numRows 0
partition_columns ds
+ rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 3062
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -797,10 +822,15 @@
columns.types int:string
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part_2
+ numFiles 4
+ numPartitions 2
+ numRows 0
partition_columns ds
+ rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 6124
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcbucket_mapjoin_part_2
@@ -819,10 +849,15 @@
columns.types int:string
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part_2
+ numFiles 2
+ numPartitions 2
+ numRows 0
partition_columns ds
+ rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 3062
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -835,10 +870,15 @@
columns.types int:string
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part_2
+ numFiles 4
+ numPartitions 2
+ numRows 0
partition_columns ds
+ rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 6124
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcbucket_mapjoin_part_2
Index: ql/src/test/results/clientpositive/pcr.q.out
===================================================================
--- ql/src/test/results/clientpositive/pcr.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/pcr.q.out (working copy)
@@ -4833,10 +4833,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -4848,10 +4853,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
@@ -4992,10 +5002,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -5007,10 +5022,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
@@ -5029,10 +5049,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -5044,10 +5069,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
@@ -5193,10 +5223,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -5208,10 +5243,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
@@ -5230,10 +5270,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -5245,10 +5290,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
Index: ql/src/test/results/clientpositive/stats3.q.out
===================================================================
--- ql/src/test/results/clientpositive/stats3.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/stats3.q.out (working copy)
@@ -11,12 +11,90 @@
POSTHOOK: query: create table hive_test_src ( col1 string ) stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@hive_test_src
+PREHOOK: query: explain extended
+load data local inpath '../data/files/test.dat' overwrite into table hive_test_src
+PREHOOK: type: LOAD
+POSTHOOK: query: explain extended
+load data local inpath '../data/files/test.dat' overwrite into table hive_test_src
+POSTHOOK: type: LOAD
+ABSTRACT SYNTAX TREE:
+ (TOK_LOAD '../data/files/test.dat' (TOK_TAB (TOK_TABNAME hive_test_src)) local overwrite)
+
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+ Stage-1 depends on stages: Stage-0
+ Stage-2 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-0
+ Copy
+#### A masked pattern was here ####
+
+ Stage: Stage-1
+ Move Operator
+ tables:
+ replace: true
+#### A masked pattern was here ####
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ bucket_count -1
+ columns col1
+ columns.types string
+#### A masked pattern was here ####
+ name default.hive_test_src
+ serialization.ddl struct hive_test_src { string col1}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.hive_test_src
+#### A masked pattern was here ####
+
+ Stage: Stage-2
+ Stats-Aggr Operator
+
+
PREHOOK: query: load data local inpath '../data/files/test.dat' overwrite into table hive_test_src
PREHOOK: type: LOAD
PREHOOK: Output: default@hive_test_src
POSTHOOK: query: load data local inpath '../data/files/test.dat' overwrite into table hive_test_src
POSTHOOK: type: LOAD
POSTHOOK: Output: default@hive_test_src
+PREHOOK: query: desc formatted hive_test_src
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: desc formatted hive_test_src
+POSTHOOK: type: DESCTABLE
+# col_name data_type comment
+
+col1 string None
+
+# Detailed Table Information
+Database: default
+#### A masked pattern was here ####
+Protect Mode: None
+Retention: 0
+#### A masked pattern was here ####
+Table Type: MANAGED_TABLE
+Table Parameters:
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
+ totalSize 11
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+Compressed: No
+Num Buckets: -1
+Bucket Columns: []
+Sort Columns: []
+Storage Desc Params:
+ serialization.format 1
PREHOOK: query: create table hive_test_dst ( col1 string ) partitioned by ( pcol1 string , pcol2 string) stored as sequencefile
PREHOOK: type: CREATETABLE
POSTHOOK: query: create table hive_test_dst ( col1 string ) partitioned by ( pcol1 string , pcol2 string) stored as sequencefile
@@ -117,17 +195,47 @@
#### A masked pattern was here ####
POSTHOOK: Lineage: hive_test_dst PARTITION(pcol1=test_part,pcol2=test_Part).col1 SIMPLE [(hive_test_src)hive_test_src.FieldSchema(name:col1, type:string, comment:null), ]
POSTHOOK: Lineage: hive_test_dst PARTITION(pcol1=test_part,pcol2=test_Part).col1 SIMPLE [(hive_test_src)hive_test_src.FieldSchema(name:col1, type:string, comment:null), ]
-PREHOOK: query: describe extended hive_test_dst
+PREHOOK: query: describe formatted hive_test_dst
PREHOOK: type: DESCTABLE
-POSTHOOK: query: describe extended hive_test_dst
+POSTHOOK: query: describe formatted hive_test_dst
POSTHOOK: type: DESCTABLE
POSTHOOK: Lineage: hive_test_dst PARTITION(pcol1=test_part,pcol2=test_Part).col1 SIMPLE [(hive_test_src)hive_test_src.FieldSchema(name:col1, type:string, comment:null), ]
POSTHOOK: Lineage: hive_test_dst PARTITION(pcol1=test_part,pcol2=test_Part).col1 SIMPLE [(hive_test_src)hive_test_src.FieldSchema(name:col1, type:string, comment:null), ]
-col1 string
-pcol1 string
-pcol2 string
+# col_name data_type comment
+col1 string None
+
+# Partition Information
+# col_name data_type comment
+
+pcol1 string None
+pcol2 string None
+
+# Detailed Table Information
+Database: default
#### A masked pattern was here ####
+Protect Mode: None
+Retention: 0
+#### A masked pattern was here ####
+Table Type: MANAGED_TABLE
+Table Parameters:
+ numFiles 1
+ numPartitions 1
+ numRows 6
+ rawDataSize 6
+ totalSize 171
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+InputFormat: org.apache.hadoop.mapred.SequenceFileInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+Compressed: No
+Num Buckets: -1
+Bucket Columns: []
+Sort Columns: []
+Storage Desc Params:
+ serialization.format 1
PREHOOK: query: drop table hive_test_src
PREHOOK: type: DROPTABLE
PREHOOK: Input: default@hive_test_src
Index: ql/src/test/results/clientpositive/join33.q.out
===================================================================
--- ql/src/test/results/clientpositive/join33.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/join33.q.out (working copy)
@@ -98,9 +98,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -112,9 +117,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src
@@ -197,10 +207,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -212,10 +227,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
Index: ql/src/test/results/clientpositive/input_part2.q.out
===================================================================
--- ql/src/test/results/clientpositive/input_part2.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/input_part2.q.out (working copy)
@@ -166,10 +166,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -181,10 +186,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
@@ -203,10 +213,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -218,10 +233,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
Index: ql/src/test/results/clientpositive/load_dyn_part8.q.out
===================================================================
--- ql/src/test/results/clientpositive/load_dyn_part8.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/load_dyn_part8.q.out (working copy)
@@ -148,10 +148,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -163,10 +168,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
@@ -185,10 +195,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -200,10 +215,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
@@ -222,10 +242,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -237,10 +262,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
@@ -259,10 +289,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -274,10 +309,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
Index: ql/src/test/results/clientpositive/sample9.q.out
===================================================================
--- ql/src/test/results/clientpositive/sample9.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/sample9.q.out (working copy)
@@ -73,9 +73,14 @@
columns.types int:string
#### A masked pattern was here ####
name default.srcbucket
+ numFiles 2
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct srcbucket { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 11603
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -88,9 +93,14 @@
columns.types int:string
#### A masked pattern was here ####
name default.srcbucket
+ numFiles 2
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct srcbucket { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 11603
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcbucket
Index: ql/src/test/results/clientpositive/describe_table.q.out
===================================================================
--- ql/src/test/results/clientpositive/describe_table.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/describe_table.q.out (working copy)
@@ -67,6 +67,11 @@
#### A masked pattern was here ####
Table Type: MANAGED_TABLE
Table Parameters:
+ numFiles 4
+ numPartitions 4
+ numRows 0
+ rawDataSize 0
+ totalSize 23248
#### A masked pattern was here ####
# Storage Information
@@ -109,6 +114,10 @@
Protect Mode: None
#### A masked pattern was here ####
Partition Parameters:
+ numFiles 1
+ numRows 0
+ rawDataSize 0
+ totalSize 5812
#### A masked pattern was here ####
# Storage Information
Index: ql/src/test/results/clientpositive/groupby_map_ppr.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby_map_ppr.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/groupby_map_ppr.q.out (working copy)
@@ -86,10 +86,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -101,10 +106,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
@@ -123,10 +133,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -138,10 +153,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
Index: ql/src/test/results/clientpositive/sample4.q.out
===================================================================
--- ql/src/test/results/clientpositive/sample4.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/sample4.q.out (working copy)
@@ -88,9 +88,14 @@
columns.types int:string
#### A masked pattern was here ####
name default.srcbucket
+ numFiles 2
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct srcbucket { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 11603
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -103,9 +108,14 @@
columns.types int:string
#### A masked pattern was here ####
name default.srcbucket
+ numFiles 2
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct srcbucket { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 11603
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcbucket
Index: ql/src/test/results/clientpositive/stats18.q.out
===================================================================
--- ql/src/test/results/clientpositive/stats18.q.out (revision 0)
+++ ql/src/test/results/clientpositive/stats18.q.out (working copy)
@@ -0,0 +1,127 @@
+PREHOOK: query: create table stats_part like srcpart
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table stats_part like srcpart
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@stats_part
+PREHOOK: query: insert overwrite table stats_part partition (ds='2010-04-08', hr = '13') select key, value from src
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@stats_part@ds=2010-04-08/hr=13
+POSTHOOK: query: insert overwrite table stats_part partition (ds='2010-04-08', hr = '13') select key, value from src
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@stats_part@ds=2010-04-08/hr=13
+POSTHOOK: Lineage: stats_part PARTITION(ds=2010-04-08,hr=13).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: stats_part PARTITION(ds=2010-04-08,hr=13).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: -- Load a file into a existing partition
+-- Some stats (numFiles, totalSize) should be updated correctly
+-- Some other stats (numRows, rawDataSize) should be cleared
+desc formatted stats_part partition (ds='2010-04-08', hr='13')
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: -- Load a file into a existing partition
+-- Some stats (numFiles, totalSize) should be updated correctly
+-- Some other stats (numRows, rawDataSize) should be cleared
+desc formatted stats_part partition (ds='2010-04-08', hr='13')
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Lineage: stats_part PARTITION(ds=2010-04-08,hr=13).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: stats_part PARTITION(ds=2010-04-08,hr=13).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+# col_name data_type comment
+
+key string default
+value string default
+
+# Partition Information
+# col_name data_type comment
+
+ds string None
+hr string None
+
+# Detailed Partition Information
+Partition Value: [2010-04-08, 13]
+Database: default
+Table: stats_part
+#### A masked pattern was here ####
+Protect Mode: None
+#### A masked pattern was here ####
+Partition Parameters:
+ numFiles 1
+ numRows 500
+ rawDataSize 5312
+ totalSize 5812
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+Compressed: No
+Num Buckets: -1
+Bucket Columns: []
+Sort Columns: []
+Storage Desc Params:
+ serialization.format 1
+PREHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE stats_part partition (ds='2010-04-08', hr='13')
+PREHOOK: type: LOAD
+PREHOOK: Output: default@stats_part@ds=2010-04-08/hr=13
+POSTHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE stats_part partition (ds='2010-04-08', hr='13')
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@stats_part@ds=2010-04-08/hr=13
+POSTHOOK: Lineage: stats_part PARTITION(ds=2010-04-08,hr=13).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: stats_part PARTITION(ds=2010-04-08,hr=13).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: desc formatted stats_part partition (ds='2010-04-08', hr='13')
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: desc formatted stats_part partition (ds='2010-04-08', hr='13')
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Lineage: stats_part PARTITION(ds=2010-04-08,hr=13).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: stats_part PARTITION(ds=2010-04-08,hr=13).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+# col_name data_type comment
+
+key string default
+value string default
+
+# Partition Information
+# col_name data_type comment
+
+ds string None
+hr string None
+
+# Detailed Partition Information
+Partition Value: [2010-04-08, 13]
+Database: default
+Table: stats_part
+#### A masked pattern was here ####
+Protect Mode: None
+#### A masked pattern was here ####
+Partition Parameters:
+ numFiles 2
+ numRows 0
+ rawDataSize 0
+ totalSize 7170
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+Compressed: No
+Num Buckets: -1
+Bucket Columns: []
+Sort Columns: []
+Storage Desc Params:
+ serialization.format 1
+PREHOOK: query: drop table stats_src
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table stats_src
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Lineage: stats_part PARTITION(ds=2010-04-08,hr=13).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: stats_part PARTITION(ds=2010-04-08,hr=13).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: drop table stats_part
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@stats_part
+PREHOOK: Output: default@stats_part
+POSTHOOK: query: drop table stats_part
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@stats_part
+POSTHOOK: Output: default@stats_part
+POSTHOOK: Lineage: stats_part PARTITION(ds=2010-04-08,hr=13).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: stats_part PARTITION(ds=2010-04-08,hr=13).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
Index: ql/src/test/results/clientpositive/stats13.q.out
===================================================================
--- ql/src/test/results/clientpositive/stats13.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/stats13.q.out (working copy)
@@ -123,9 +123,9 @@
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: desc extended analyze_srcpart
+PREHOOK: query: desc formatted analyze_srcpart
PREHOOK: type: DESCTABLE
-POSTHOOK: query: desc extended analyze_srcpart
+POSTHOOK: query: desc formatted analyze_srcpart
POSTHOOK: type: DESCTABLE
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
@@ -135,15 +135,45 @@
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-key string default
-value string default
-ds string
-hr string
+# col_name data_type comment
+key string default
+value string default
+
+# Partition Information
+# col_name data_type comment
+
+ds string None
+hr string None
+
+# Detailed Table Information
+Database: default
#### A masked pattern was here ####
-PREHOOK: query: desc extended analyze_srcpart partition (ds='2008-04-08', hr=11)
+Protect Mode: None
+Retention: 0
+#### A masked pattern was here ####
+Table Type: MANAGED_TABLE
+Table Parameters:
+ numFiles 1
+ numPartitions 1
+ numRows 500
+ rawDataSize 5312
+ totalSize 5812
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+Compressed: No
+Num Buckets: -1
+Bucket Columns: []
+Sort Columns: []
+Storage Desc Params:
+ serialization.format 1
+PREHOOK: query: desc formatted analyze_srcpart partition (ds='2008-04-08', hr=11)
PREHOOK: type: DESCTABLE
-POSTHOOK: query: desc extended analyze_srcpart partition (ds='2008-04-08', hr=11)
+POSTHOOK: query: desc formatted analyze_srcpart partition (ds='2008-04-08', hr=11)
POSTHOOK: type: DESCTABLE
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
@@ -153,15 +183,44 @@
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-key string default
-value string default
-ds string
-hr string
+# col_name data_type comment
+key string default
+value string default
+
+# Partition Information
+# col_name data_type comment
+
+ds string None
+hr string None
+
+# Detailed Partition Information
+Partition Value: [2008-04-08, 11]
+Database: default
+Table: analyze_srcpart
#### A masked pattern was here ####
-PREHOOK: query: desc extended analyze_srcpart partition (ds='2008-04-08', hr=12)
+Protect Mode: None
+#### A masked pattern was here ####
+Partition Parameters:
+ numFiles 1
+ numRows 500
+ rawDataSize 5312
+ totalSize 5812
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+Compressed: No
+Num Buckets: -1
+Bucket Columns: []
+Sort Columns: []
+Storage Desc Params:
+ serialization.format 1
+PREHOOK: query: desc formatted analyze_srcpart partition (ds='2008-04-08', hr=12)
PREHOOK: type: DESCTABLE
-POSTHOOK: query: desc extended analyze_srcpart partition (ds='2008-04-08', hr=12)
+POSTHOOK: query: desc formatted analyze_srcpart partition (ds='2008-04-08', hr=12)
POSTHOOK: type: DESCTABLE
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
@@ -171,15 +230,40 @@
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-key string default
-value string default
-ds string
-hr string
+# col_name data_type comment
+key string default
+value string default
+
+# Partition Information
+# col_name data_type comment
+
+ds string None
+hr string None
+
+# Detailed Partition Information
+Partition Value: [2008-04-08, 12]
+Database: default
+Table: analyze_srcpart
#### A masked pattern was here ####
-PREHOOK: query: desc extended analyze_srcpart partition (ds='2008-04-09', hr=11)
+Protect Mode: None
+#### A masked pattern was here ####
+Partition Parameters:
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+Compressed: No
+Num Buckets: -1
+Bucket Columns: []
+Sort Columns: []
+Storage Desc Params:
+ serialization.format 1
+PREHOOK: query: desc formatted analyze_srcpart partition (ds='2008-04-09', hr=11)
PREHOOK: type: DESCTABLE
-POSTHOOK: query: desc extended analyze_srcpart partition (ds='2008-04-09', hr=11)
+POSTHOOK: query: desc formatted analyze_srcpart partition (ds='2008-04-09', hr=11)
POSTHOOK: type: DESCTABLE
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
@@ -189,15 +273,40 @@
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-key string default
-value string default
-ds string
-hr string
+# col_name data_type comment
+key string default
+value string default
+
+# Partition Information
+# col_name data_type comment
+
+ds string None
+hr string None
+
+# Detailed Partition Information
+Partition Value: [2008-04-09, 11]
+Database: default
+Table: analyze_srcpart
#### A masked pattern was here ####
-PREHOOK: query: desc extended analyze_srcpart partition (ds='2008-04-09', hr=12)
+Protect Mode: None
+#### A masked pattern was here ####
+Partition Parameters:
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+Compressed: No
+Num Buckets: -1
+Bucket Columns: []
+Sort Columns: []
+Storage Desc Params:
+ serialization.format 1
+PREHOOK: query: desc formatted analyze_srcpart partition (ds='2008-04-09', hr=12)
PREHOOK: type: DESCTABLE
-POSTHOOK: query: desc extended analyze_srcpart partition (ds='2008-04-09', hr=12)
+POSTHOOK: query: desc formatted analyze_srcpart partition (ds='2008-04-09', hr=12)
POSTHOOK: type: DESCTABLE
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
@@ -207,12 +316,37 @@
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-key string default
-value string default
-ds string
-hr string
+# col_name data_type comment
+key string default
+value string default
+
+# Partition Information
+# col_name data_type comment
+
+ds string None
+hr string None
+
+# Detailed Partition Information
+Partition Value: [2008-04-09, 12]
+Database: default
+Table: analyze_srcpart
#### A masked pattern was here ####
+Protect Mode: None
+#### A masked pattern was here ####
+Partition Parameters:
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+Compressed: No
+Num Buckets: -1
+Bucket Columns: []
+Sort Columns: []
+Storage Desc Params:
+ serialization.format 1
PREHOOK: query: create table analyze_srcpart2 like analyze_srcpart
PREHOOK: type: CREATETABLE
POSTHOOK: query: create table analyze_srcpart2 like analyze_srcpart
@@ -226,9 +360,9 @@
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: desc extended analyze_srcpart2
+PREHOOK: query: desc formatted analyze_srcpart2
PREHOOK: type: DESCTABLE
-POSTHOOK: query: desc extended analyze_srcpart2
+POSTHOOK: query: desc formatted analyze_srcpart2
POSTHOOK: type: DESCTABLE
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
@@ -238,9 +372,34 @@
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-key string default
-value string default
-ds string
-hr string
+# col_name data_type comment
+key string default
+value string default
+
+# Partition Information
+# col_name data_type comment
+
+ds string None
+hr string None
+
+# Detailed Table Information
+Database: default
#### A masked pattern was here ####
+Protect Mode: None
+Retention: 0
+#### A masked pattern was here ####
+Table Type: MANAGED_TABLE
+Table Parameters:
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+Compressed: No
+Num Buckets: -1
+Bucket Columns: []
+Sort Columns: []
+Storage Desc Params:
+ serialization.format 1
Index: ql/src/test/results/clientpositive/rand_partitionpruner1.q.out
===================================================================
--- ql/src/test/results/clientpositive/rand_partitionpruner1.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/rand_partitionpruner1.q.out (working copy)
@@ -63,9 +63,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -77,9 +82,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src
Index: ql/src/test/results/clientpositive/bucketcontext_2.q.out
===================================================================
--- ql/src/test/results/clientpositive/bucketcontext_2.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/bucketcontext_2.q.out (working copy)
@@ -157,10 +157,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.bucket_big
+ numFiles 2
+ numPartitions 2
+ numRows 0
partition_columns ds
+ rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 2750
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -174,10 +179,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.bucket_big
+ numFiles 4
+ numPartitions 2
+ numRows 0
partition_columns ds
+ rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5500
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.bucket_big
@@ -197,10 +207,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.bucket_big
+ numFiles 2
+ numPartitions 2
+ numRows 0
partition_columns ds
+ rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 2750
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -214,10 +229,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.bucket_big
+ numFiles 4
+ numPartitions 2
+ numRows 0
partition_columns ds
+ rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5500
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.bucket_big
@@ -405,10 +425,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.bucket_big
+ numFiles 2
+ numPartitions 2
+ numRows 0
partition_columns ds
+ rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 2750
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -422,10 +447,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.bucket_big
+ numFiles 4
+ numPartitions 2
+ numRows 0
partition_columns ds
+ rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5500
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.bucket_big
@@ -445,10 +475,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.bucket_big
+ numFiles 2
+ numPartitions 2
+ numRows 0
partition_columns ds
+ rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 2750
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -462,10 +497,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.bucket_big
+ numFiles 4
+ numPartitions 2
+ numRows 0
partition_columns ds
+ rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5500
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.bucket_big
Index: ql/src/test/results/clientpositive/bucket2.q.out
===================================================================
--- ql/src/test/results/clientpositive/bucket2.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/bucket2.q.out (working copy)
@@ -60,9 +60,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -74,9 +79,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src
Index: ql/src/test/results/clientpositive/groupby_map_ppr_multi_distinct.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby_map_ppr_multi_distinct.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/groupby_map_ppr_multi_distinct.q.out (working copy)
@@ -96,10 +96,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -111,10 +116,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
@@ -133,10 +143,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -148,10 +163,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
Index: ql/src/test/results/clientpositive/join17.q.out
===================================================================
--- ql/src/test/results/clientpositive/join17.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/join17.q.out (working copy)
@@ -74,9 +74,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -88,9 +93,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src
Index: ql/src/test/results/clientpositive/input_part9.q.out
===================================================================
--- ql/src/test/results/clientpositive/input_part9.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/input_part9.q.out (working copy)
@@ -70,10 +70,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -85,10 +90,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
@@ -107,10 +117,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -122,10 +137,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
Index: ql/src/test/results/clientpositive/join26.q.out
===================================================================
--- ql/src/test/results/clientpositive/join26.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/join26.q.out (working copy)
@@ -155,10 +155,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -170,10 +175,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
Index: ql/src/test/results/clientpositive/bucketmapjoin_negative.q.out
===================================================================
--- ql/src/test/results/clientpositive/bucketmapjoin_negative.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/bucketmapjoin_negative.q.out (working copy)
@@ -173,9 +173,14 @@
columns.types int:string
#### A masked pattern was here ####
name default.srcbucket_mapjoin
+ numFiles 2
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct srcbucket_mapjoin { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 2750
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -188,9 +193,14 @@
columns.types int:string
#### A masked pattern was here ####
name default.srcbucket_mapjoin
+ numFiles 2
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct srcbucket_mapjoin { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 2750
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcbucket_mapjoin
Index: ql/src/test/results/clientpositive/stats5.q.out
===================================================================
--- ql/src/test/results/clientpositive/stats5.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/stats5.q.out (working copy)
@@ -36,11 +36,37 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@analyze_src
POSTHOOK: Output: default@analyze_src
-PREHOOK: query: describe extended analyze_src
+PREHOOK: query: describe formatted analyze_src
PREHOOK: type: DESCTABLE
-POSTHOOK: query: describe extended analyze_src
+POSTHOOK: query: describe formatted analyze_src
POSTHOOK: type: DESCTABLE
-key string
-value string
+# col_name data_type comment
+key string None
+value string None
+
+# Detailed Table Information
+Database: default
#### A masked pattern was here ####
+Protect Mode: None
+Retention: 0
+#### A masked pattern was here ####
+Table Type: MANAGED_TABLE
+Table Parameters:
+ numFiles 1
+ numPartitions 0
+ numRows 500
+ rawDataSize 5312
+ totalSize 5812
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+Compressed: No
+Num Buckets: -1
+Bucket Columns: []
+Sort Columns: []
+Storage Desc Params:
+ serialization.format 1
Index: ql/src/test/results/clientpositive/join35.q.out
===================================================================
--- ql/src/test/results/clientpositive/join35.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/join35.q.out (working copy)
@@ -93,9 +93,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -107,9 +112,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src
@@ -566,9 +576,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -580,9 +595,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src
Index: ql/src/test/results/clientpositive/bucketmapjoin2.q.out
===================================================================
--- ql/src/test/results/clientpositive/bucketmapjoin2.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/bucketmapjoin2.q.out (working copy)
@@ -197,10 +197,15 @@
columns.types int:string
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part
+ numFiles 4
+ numPartitions 1
+ numRows 0
partition_columns ds
+ rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -213,10 +218,15 @@
columns.types int:string
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part
+ numFiles 4
+ numPartitions 1
+ numRows 0
partition_columns ds
+ rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcbucket_mapjoin_part
@@ -684,10 +694,15 @@
columns.types int:string
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part_2
+ numFiles 2
+ numPartitions 1
+ numRows 0
partition_columns ds
+ rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 3062
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -700,10 +715,15 @@
columns.types int:string
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part_2
+ numFiles 2
+ numPartitions 1
+ numRows 0
partition_columns ds
+ rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 3062
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcbucket_mapjoin_part_2
@@ -1365,10 +1385,15 @@
columns.types int:string
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part
+ numFiles 4
+ numPartitions 1
+ numRows 0
partition_columns ds
+ rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1381,10 +1406,15 @@
columns.types int:string
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part
+ numFiles 4
+ numPartitions 1
+ numRows 0
partition_columns ds
+ rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcbucket_mapjoin_part
Index: ql/src/test/results/clientpositive/join_map_ppr.q.out
===================================================================
--- ql/src/test/results/clientpositive/join_map_ppr.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/join_map_ppr.q.out (working copy)
@@ -161,10 +161,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -176,10 +181,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
@@ -710,10 +720,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -725,10 +740,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
Index: ql/src/test/results/clientpositive/stats0.q.out
===================================================================
--- ql/src/test/results/clientpositive/stats0.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/stats0.q.out (working copy)
@@ -73,9 +73,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -87,9 +92,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src
@@ -1391,9 +1401,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1405,9 +1420,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src
Index: ql/src/test/results/clientpositive/join9.q.out
===================================================================
--- ql/src/test/results/clientpositive/join9.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/join9.q.out (working copy)
@@ -74,9 +74,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -88,9 +93,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src
@@ -109,10 +119,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -124,10 +139,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
Index: ql/src/test/results/clientpositive/ppr_allchildsarenull.q.out
===================================================================
--- ql/src/test/results/clientpositive/ppr_allchildsarenull.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/ppr_allchildsarenull.q.out (working copy)
@@ -78,10 +78,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -93,10 +98,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
@@ -115,10 +125,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -130,10 +145,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
@@ -246,10 +266,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -261,10 +286,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
@@ -283,10 +313,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -298,10 +333,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
@@ -320,10 +360,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -335,10 +380,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
@@ -357,10 +407,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -372,10 +427,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
Index: ql/src/test/results/clientpositive/sample6.q.out
===================================================================
--- ql/src/test/results/clientpositive/sample6.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/sample6.q.out (working copy)
@@ -86,9 +86,14 @@
columns.types int:string
#### A masked pattern was here ####
name default.srcbucket
+ numFiles 2
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct srcbucket { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 11603
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -101,9 +106,14 @@
columns.types int:string
#### A masked pattern was here ####
name default.srcbucket
+ numFiles 2
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct srcbucket { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 11603
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcbucket
@@ -614,9 +624,14 @@
columns.types int:string
#### A masked pattern was here ####
name default.srcbucket
+ numFiles 2
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct srcbucket { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 11603
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -629,9 +644,14 @@
columns.types int:string
#### A masked pattern was here ####
name default.srcbucket
+ numFiles 2
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct srcbucket { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 11603
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcbucket
@@ -976,9 +996,14 @@
columns.types int:string
#### A masked pattern was here ####
name default.srcbucket
+ numFiles 2
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct srcbucket { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 11603
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -991,9 +1016,14 @@
columns.types int:string
#### A masked pattern was here ####
name default.srcbucket
+ numFiles 2
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct srcbucket { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 11603
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcbucket
@@ -1592,9 +1622,14 @@
columns.types int:string
#### A masked pattern was here ####
name default.srcbucket
+ numFiles 2
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct srcbucket { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 11603
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1607,9 +1642,14 @@
columns.types int:string
#### A masked pattern was here ####
name default.srcbucket
+ numFiles 2
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct srcbucket { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 11603
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcbucket
@@ -2051,9 +2091,14 @@
columns.types int:string
#### A masked pattern was here ####
name default.srcbucket
+ numFiles 2
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct srcbucket { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 11603
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -2066,9 +2111,14 @@
columns.types int:string
#### A masked pattern was here ####
name default.srcbucket
+ numFiles 2
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct srcbucket { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 11603
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcbucket
@@ -2496,9 +2546,14 @@
columns.types int:string
#### A masked pattern was here ####
name default.srcbucket2
+ numFiles 4
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct srcbucket2 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -2511,9 +2566,14 @@
columns.types int:string
#### A masked pattern was here ####
name default.srcbucket2
+ numFiles 4
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct srcbucket2 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcbucket2
@@ -2530,9 +2590,14 @@
columns.types int:string
#### A masked pattern was here ####
name default.srcbucket2
+ numFiles 4
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct srcbucket2 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -2545,9 +2610,14 @@
columns.types int:string
#### A masked pattern was here ####
name default.srcbucket2
+ numFiles 4
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct srcbucket2 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcbucket2
@@ -2775,9 +2845,14 @@
columns.types int:string
#### A masked pattern was here ####
name default.srcbucket2
+ numFiles 4
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct srcbucket2 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -2790,9 +2865,14 @@
columns.types int:string
#### A masked pattern was here ####
name default.srcbucket2
+ numFiles 4
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct srcbucket2 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcbucket2
Index: ql/src/test/results/clientpositive/bucket_map_join_1.q.out
===================================================================
--- ql/src/test/results/clientpositive/bucket_map_join_1.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/bucket_map_join_1.q.out (working copy)
@@ -136,9 +136,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.table1
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct table1 { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 20
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -152,9 +157,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.table1
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct table1 { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 20
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.table1
@@ -232,7 +242,6 @@
Fetch Operator
limit: -1
-
PREHOOK: query: select /*+ mapjoin(b) */ count(*) from table1 a join table2 b on a.key=b.key and a.value=b.value
PREHOOK: type: QUERY
PREHOOK: Input: default@table1
Index: ql/src/test/results/clientpositive/sample1.q.out
===================================================================
--- ql/src/test/results/clientpositive/sample1.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/sample1.q.out (working copy)
@@ -105,10 +105,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -120,10 +125,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
Index: ql/src/test/results/clientpositive/reduce_deduplicate.q.out
===================================================================
--- ql/src/test/results/clientpositive/reduce_deduplicate.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/reduce_deduplicate.q.out (working copy)
@@ -63,9 +63,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -77,9 +82,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src
Index: ql/src/test/results/clientpositive/rand_partitionpruner3.q.out
===================================================================
--- ql/src/test/results/clientpositive/rand_partitionpruner3.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/rand_partitionpruner3.q.out (working copy)
@@ -72,10 +72,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -87,10 +92,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
@@ -186,10 +196,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -201,10 +216,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
Index: ql/src/test/results/clientpositive/bucketcontext_4.q.out
===================================================================
--- ql/src/test/results/clientpositive/bucketcontext_4.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/bucketcontext_4.q.out (working copy)
@@ -169,10 +169,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.bucket_big
+ numFiles 2
+ numPartitions 1
+ numRows 0
partition_columns ds
+ rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 2750
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -186,10 +191,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.bucket_big
+ numFiles 2
+ numPartitions 1
+ numRows 0
partition_columns ds
+ rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 2750
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.bucket_big
@@ -377,10 +387,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.bucket_big
+ numFiles 2
+ numPartitions 1
+ numRows 0
partition_columns ds
+ rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 2750
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -394,10 +409,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.bucket_big
+ numFiles 2
+ numPartitions 1
+ numRows 0
partition_columns ds
+ rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 2750
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.bucket_big
Index: ql/src/test/results/clientpositive/stats10.q.out
===================================================================
--- ql/src/test/results/clientpositive/stats10.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/stats10.q.out (working copy)
@@ -430,9 +430,9 @@
POSTHOOK: Lineage: bucket3_1 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: bucket3_1 PARTITION(ds=2).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: bucket3_1 PARTITION(ds=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: describe extended bucket3_1 partition (ds='1')
+PREHOOK: query: describe formatted bucket3_1 partition (ds='1')
PREHOOK: type: DESCTABLE
-POSTHOOK: query: describe extended bucket3_1 partition (ds='1')
+POSTHOOK: query: describe formatted bucket3_1 partition (ds='1')
POSTHOOK: type: DESCTABLE
POSTHOOK: Lineage: bucket3_1 PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: bucket3_1 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
@@ -440,14 +440,43 @@
POSTHOOK: Lineage: bucket3_1 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: bucket3_1 PARTITION(ds=2).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: bucket3_1 PARTITION(ds=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-key int
-value string
-ds string
+# col_name data_type comment
+key int None
+value string None
+
+# Partition Information
+# col_name data_type comment
+
+ds string None
+
+# Detailed Partition Information
+Partition Value: [1]
+Database: default
+Table: bucket3_1
#### A masked pattern was here ####
-PREHOOK: query: describe extended bucket3_1 partition (ds='2')
+Protect Mode: None
+#### A masked pattern was here ####
+Partition Parameters:
+ numFiles 2
+ numRows 0
+ rawDataSize 0
+ totalSize 5812
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+Compressed: No
+Num Buckets: 2
+Bucket Columns: [key]
+Sort Columns: []
+Storage Desc Params:
+ serialization.format 1
+PREHOOK: query: describe formatted bucket3_1 partition (ds='2')
PREHOOK: type: DESCTABLE
-POSTHOOK: query: describe extended bucket3_1 partition (ds='2')
+POSTHOOK: query: describe formatted bucket3_1 partition (ds='2')
POSTHOOK: type: DESCTABLE
POSTHOOK: Lineage: bucket3_1 PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: bucket3_1 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
@@ -455,14 +484,43 @@
POSTHOOK: Lineage: bucket3_1 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: bucket3_1 PARTITION(ds=2).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: bucket3_1 PARTITION(ds=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-key int
-value string
-ds string
+# col_name data_type comment
+key int None
+value string None
+
+# Partition Information
+# col_name data_type comment
+
+ds string None
+
+# Detailed Partition Information
+Partition Value: [2]
+Database: default
+Table: bucket3_1
#### A masked pattern was here ####
-PREHOOK: query: describe extended bucket3_1
+Protect Mode: None
+#### A masked pattern was here ####
+Partition Parameters:
+ numFiles 2
+ numRows 1000
+ rawDataSize 10624
+ totalSize 5812
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+Compressed: No
+Num Buckets: 2
+Bucket Columns: [key]
+Sort Columns: []
+Storage Desc Params:
+ serialization.format 1
+PREHOOK: query: describe formatted bucket3_1
PREHOOK: type: DESCTABLE
-POSTHOOK: query: describe extended bucket3_1
+POSTHOOK: query: describe formatted bucket3_1
POSTHOOK: type: DESCTABLE
POSTHOOK: Lineage: bucket3_1 PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: bucket3_1 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
@@ -470,8 +528,38 @@
POSTHOOK: Lineage: bucket3_1 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: bucket3_1 PARTITION(ds=2).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: bucket3_1 PARTITION(ds=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-key int
-value string
-ds string
+# col_name data_type comment
+key int None
+value string None
+
+# Partition Information
+# col_name data_type comment
+
+ds string None
+
+# Detailed Table Information
+Database: default
#### A masked pattern was here ####
+Protect Mode: None
+Retention: 0
+#### A masked pattern was here ####
+Table Type: MANAGED_TABLE
+Table Parameters:
+ numFiles 4
+ numPartitions 2
+ numRows 1000
+ rawDataSize 10624
+ totalSize 11624
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+Compressed: No
+Num Buckets: 2
+Bucket Columns: [key]
+Sort Columns: []
+Storage Desc Params:
+ serialization.format 1
Index: ql/src/test/results/clientpositive/bucket4.q.out
===================================================================
--- ql/src/test/results/clientpositive/bucket4.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/bucket4.q.out (working copy)
@@ -63,9 +63,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -77,9 +82,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src
Index: ql/src/test/results/clientpositive/udtf_explode.q.out
===================================================================
--- ql/src/test/results/clientpositive/udtf_explode.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/udtf_explode.q.out (working copy)
@@ -67,9 +67,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -81,9 +86,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src
@@ -143,9 +153,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -157,9 +172,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src
@@ -395,9 +415,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -409,9 +434,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src
Index: ql/src/test/results/clientpositive/binary_output_format.q.out
===================================================================
--- ql/src/test/results/clientpositive/binary_output_format.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/binary_output_format.q.out (working copy)
@@ -128,9 +128,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -142,9 +147,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src
Index: ql/src/test/results/clientpositive/stats7.q.out
===================================================================
--- ql/src/test/results/clientpositive/stats7.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/stats7.q.out (working copy)
@@ -81,9 +81,9 @@
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: describe extended analyze_srcpart PARTITION(ds='2008-04-08',hr=11)
+PREHOOK: query: describe formatted analyze_srcpart PARTITION(ds='2008-04-08',hr=11)
PREHOOK: type: DESCTABLE
-POSTHOOK: query: describe extended analyze_srcpart PARTITION(ds='2008-04-08',hr=11)
+POSTHOOK: query: describe formatted analyze_srcpart PARTITION(ds='2008-04-08',hr=11)
POSTHOOK: type: DESCTABLE
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
@@ -93,15 +93,44 @@
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-key string default
-value string default
-ds string
-hr string
+# col_name data_type comment
+key string default
+value string default
+
+# Partition Information
+# col_name data_type comment
+
+ds string None
+hr string None
+
+# Detailed Partition Information
+Partition Value: [2008-04-08, 11]
+Database: default
+Table: analyze_srcpart
#### A masked pattern was here ####
-PREHOOK: query: describe extended analyze_srcpart PARTITION(ds='2008-04-08',hr=12)
+Protect Mode: None
+#### A masked pattern was here ####
+Partition Parameters:
+ numFiles 1
+ numRows 1000
+ rawDataSize 10624
+ totalSize 5812
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+Compressed: No
+Num Buckets: -1
+Bucket Columns: []
+Sort Columns: []
+Storage Desc Params:
+ serialization.format 1
+PREHOOK: query: describe formatted analyze_srcpart PARTITION(ds='2008-04-08',hr=12)
PREHOOK: type: DESCTABLE
-POSTHOOK: query: describe extended analyze_srcpart PARTITION(ds='2008-04-08',hr=12)
+POSTHOOK: query: describe formatted analyze_srcpart PARTITION(ds='2008-04-08',hr=12)
POSTHOOK: type: DESCTABLE
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
@@ -111,15 +140,44 @@
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-key string default
-value string default
-ds string
-hr string
+# col_name data_type comment
+key string default
+value string default
+
+# Partition Information
+# col_name data_type comment
+
+ds string None
+hr string None
+
+# Detailed Partition Information
+Partition Value: [2008-04-08, 12]
+Database: default
+Table: analyze_srcpart
#### A masked pattern was here ####
-PREHOOK: query: describe extended analyze_srcpart
+Protect Mode: None
+#### A masked pattern was here ####
+Partition Parameters:
+ numFiles 1
+ numRows 0
+ rawDataSize 0
+ totalSize 5812
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+Compressed: No
+Num Buckets: -1
+Bucket Columns: []
+Sort Columns: []
+Storage Desc Params:
+ serialization.format 1
+PREHOOK: query: describe formatted analyze_srcpart
PREHOOK: type: DESCTABLE
-POSTHOOK: query: describe extended analyze_srcpart
+POSTHOOK: query: describe formatted analyze_srcpart
POSTHOOK: type: DESCTABLE
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
@@ -129,9 +187,39 @@
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-key string default
-value string default
-ds string
-hr string
+# col_name data_type comment
+key string default
+value string default
+
+# Partition Information
+# col_name data_type comment
+
+ds string None
+hr string None
+
+# Detailed Table Information
+Database: default
#### A masked pattern was here ####
+Protect Mode: None
+Retention: 0
+#### A masked pattern was here ####
+Table Type: MANAGED_TABLE
+Table Parameters:
+ numFiles 2
+ numPartitions 2
+ numRows 1000
+ rawDataSize 10624
+ totalSize 11624
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+Compressed: No
+Num Buckets: -1
+Bucket Columns: []
+Sort Columns: []
+Storage Desc Params:
+ serialization.format 1
Index: ql/src/test/results/clientpositive/bucketmapjoin4.q.out
===================================================================
--- ql/src/test/results/clientpositive/bucketmapjoin4.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/bucketmapjoin4.q.out (working copy)
@@ -212,9 +212,14 @@
columns.types int:string
#### A masked pattern was here ####
name default.srcbucket_mapjoin
+ numFiles 2
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct srcbucket_mapjoin { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 2750
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -227,9 +232,14 @@
columns.types int:string
#### A masked pattern was here ####
name default.srcbucket_mapjoin
+ numFiles 2
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct srcbucket_mapjoin { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 2750
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcbucket_mapjoin
@@ -691,9 +701,14 @@
columns.types int:string
#### A masked pattern was here ####
name default.srcbucket_mapjoin
+ numFiles 2
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct srcbucket_mapjoin { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 2750
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -706,9 +721,14 @@
columns.types int:string
#### A masked pattern was here ####
name default.srcbucket_mapjoin
+ numFiles 2
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct srcbucket_mapjoin { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 2750
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcbucket_mapjoin
Index: ql/src/test/results/clientpositive/stats2.q.out
===================================================================
--- ql/src/test/results/clientpositive/stats2.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/stats2.q.out (working copy)
@@ -82,9 +82,9 @@
POSTHOOK: Lineage: analyze_t1 PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_t1 PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_t1 PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: desc extended analyze_t1
+PREHOOK: query: desc formatted analyze_t1
PREHOOK: type: DESCTABLE
-POSTHOOK: query: desc extended analyze_t1
+POSTHOOK: query: desc formatted analyze_t1
POSTHOOK: type: DESCTABLE
POSTHOOK: Lineage: analyze_t1 PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_t1 PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
@@ -94,12 +94,37 @@
POSTHOOK: Lineage: analyze_t1 PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_t1 PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_t1 PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-key string default
-value string default
-ds string
-hr string
+# col_name data_type comment
+key string default
+value string default
+
+# Partition Information
+# col_name data_type comment
+
+ds string None
+hr string None
+
+# Detailed Table Information
+Database: default
#### A masked pattern was here ####
+Protect Mode: None
+Retention: 0
+#### A masked pattern was here ####
+Table Type: MANAGED_TABLE
+Table Parameters:
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+Compressed: No
+Num Buckets: -1
+Bucket Columns: []
+Sort Columns: []
+Storage Desc Params:
+ serialization.format 1
PREHOOK: query: explain analyze table analyze_t1 partition (ds, hr) compute statistics
PREHOOK: type: QUERY
POSTHOOK: query: explain analyze table analyze_t1 partition (ds, hr) compute statistics
@@ -161,9 +186,9 @@
POSTHOOK: Lineage: analyze_t1 PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_t1 PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_t1 PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: describe extended analyze_t1
+PREHOOK: query: describe formatted analyze_t1
PREHOOK: type: DESCTABLE
-POSTHOOK: query: describe extended analyze_t1
+POSTHOOK: query: describe formatted analyze_t1
POSTHOOK: type: DESCTABLE
POSTHOOK: Lineage: analyze_t1 PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_t1 PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
@@ -173,9 +198,39 @@
POSTHOOK: Lineage: analyze_t1 PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_t1 PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_t1 PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-key string default
-value string default
-ds string
-hr string
+# col_name data_type comment
+key string default
+value string default
+
+# Partition Information
+# col_name data_type comment
+
+ds string None
+hr string None
+
+# Detailed Table Information
+Database: default
#### A masked pattern was here ####
+Protect Mode: None
+Retention: 0
+#### A masked pattern was here ####
+Table Type: MANAGED_TABLE
+Table Parameters:
+ numFiles 4
+ numPartitions 4
+ numRows 2000
+ rawDataSize 21248
+ totalSize 23248
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+Compressed: No
+Num Buckets: -1
+Bucket Columns: []
+Sort Columns: []
+Storage Desc Params:
+ serialization.format 1
Index: ql/src/test/results/clientpositive/join32.q.out
===================================================================
--- ql/src/test/results/clientpositive/join32.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/join32.q.out (working copy)
@@ -104,9 +104,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -118,9 +123,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src
Index: ql/src/test/results/clientpositive/input_part1.q.out
===================================================================
--- ql/src/test/results/clientpositive/input_part1.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/input_part1.q.out (working copy)
@@ -101,10 +101,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -116,10 +121,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
Index: ql/src/test/results/clientpositive/sample8.q.out
===================================================================
--- ql/src/test/results/clientpositive/sample8.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/sample8.q.out (working copy)
@@ -84,10 +84,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -99,10 +104,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
@@ -121,10 +131,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -136,10 +151,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
@@ -158,10 +178,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -173,10 +198,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
@@ -195,10 +225,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -210,10 +245,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
Index: ql/src/test/results/clientpositive/transform_ppr2.q.out
===================================================================
--- ql/src/test/results/clientpositive/transform_ppr2.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/transform_ppr2.q.out (working copy)
@@ -91,10 +91,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -106,10 +111,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
@@ -128,10 +138,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -143,10 +158,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
Index: ql/src/test/results/clientpositive/union_ppr.q.out
===================================================================
--- ql/src/test/results/clientpositive/union_ppr.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/union_ppr.q.out (working copy)
@@ -151,10 +151,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -166,10 +171,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
@@ -188,10 +198,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -203,10 +218,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
Index: ql/src/test/results/clientpositive/stats12.q.out
===================================================================
--- ql/src/test/results/clientpositive/stats12.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/stats12.q.out (working copy)
@@ -164,9 +164,9 @@
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: desc extended analyze_srcpart
+PREHOOK: query: desc formatted analyze_srcpart
PREHOOK: type: DESCTABLE
-POSTHOOK: query: desc extended analyze_srcpart
+POSTHOOK: query: desc formatted analyze_srcpart
POSTHOOK: type: DESCTABLE
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
@@ -176,15 +176,45 @@
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-key string default
-value string default
-ds string
-hr string
+# col_name data_type comment
+key string default
+value string default
+
+# Partition Information
+# col_name data_type comment
+
+ds string None
+hr string None
+
+# Detailed Table Information
+Database: default
#### A masked pattern was here ####
-PREHOOK: query: desc extended analyze_srcpart partition (ds='2008-04-08', hr=11)
+Protect Mode: None
+Retention: 0
+#### A masked pattern was here ####
+Table Type: MANAGED_TABLE
+Table Parameters:
+ numFiles 2
+ numPartitions 2
+ numRows 1000
+ rawDataSize 10624
+ totalSize 11624
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+Compressed: No
+Num Buckets: -1
+Bucket Columns: []
+Sort Columns: []
+Storage Desc Params:
+ serialization.format 1
+PREHOOK: query: desc formatted analyze_srcpart partition (ds='2008-04-08', hr=11)
PREHOOK: type: DESCTABLE
-POSTHOOK: query: desc extended analyze_srcpart partition (ds='2008-04-08', hr=11)
+POSTHOOK: query: desc formatted analyze_srcpart partition (ds='2008-04-08', hr=11)
POSTHOOK: type: DESCTABLE
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
@@ -194,15 +224,44 @@
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-key string default
-value string default
-ds string
-hr string
+# col_name data_type comment
+key string default
+value string default
+
+# Partition Information
+# col_name data_type comment
+
+ds string None
+hr string None
+
+# Detailed Partition Information
+Partition Value: [2008-04-08, 11]
+Database: default
+Table: analyze_srcpart
#### A masked pattern was here ####
-PREHOOK: query: desc extended analyze_srcpart partition (ds='2008-04-08', hr=12)
+Protect Mode: None
+#### A masked pattern was here ####
+Partition Parameters:
+ numFiles 1
+ numRows 0
+ rawDataSize 0
+ totalSize 5812
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+Compressed: No
+Num Buckets: -1
+Bucket Columns: []
+Sort Columns: []
+Storage Desc Params:
+ serialization.format 1
+PREHOOK: query: desc formatted analyze_srcpart partition (ds='2008-04-08', hr=12)
PREHOOK: type: DESCTABLE
-POSTHOOK: query: desc extended analyze_srcpart partition (ds='2008-04-08', hr=12)
+POSTHOOK: query: desc formatted analyze_srcpart partition (ds='2008-04-08', hr=12)
POSTHOOK: type: DESCTABLE
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
@@ -212,15 +271,44 @@
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-key string default
-value string default
-ds string
-hr string
+# col_name data_type comment
+key string default
+value string default
+
+# Partition Information
+# col_name data_type comment
+
+ds string None
+hr string None
+
+# Detailed Partition Information
+Partition Value: [2008-04-08, 12]
+Database: default
+Table: analyze_srcpart
#### A masked pattern was here ####
-PREHOOK: query: desc extended analyze_srcpart partition (ds='2008-04-09', hr=11)
+Protect Mode: None
+#### A masked pattern was here ####
+Partition Parameters:
+ numFiles 1
+ numRows 1000
+ rawDataSize 10624
+ totalSize 5812
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+Compressed: No
+Num Buckets: -1
+Bucket Columns: []
+Sort Columns: []
+Storage Desc Params:
+ serialization.format 1
+PREHOOK: query: desc formatted analyze_srcpart partition (ds='2008-04-09', hr=11)
PREHOOK: type: DESCTABLE
-POSTHOOK: query: desc extended analyze_srcpart partition (ds='2008-04-09', hr=11)
+POSTHOOK: query: desc formatted analyze_srcpart partition (ds='2008-04-09', hr=11)
POSTHOOK: type: DESCTABLE
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
@@ -230,15 +318,40 @@
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-key string default
-value string default
-ds string
-hr string
+# col_name data_type comment
+key string default
+value string default
+
+# Partition Information
+# col_name data_type comment
+
+ds string None
+hr string None
+
+# Detailed Partition Information
+Partition Value: [2008-04-09, 11]
+Database: default
+Table: analyze_srcpart
#### A masked pattern was here ####
-PREHOOK: query: desc extended analyze_srcpart partition (ds='2008-04-09', hr=12)
+Protect Mode: None
+#### A masked pattern was here ####
+Partition Parameters:
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+Compressed: No
+Num Buckets: -1
+Bucket Columns: []
+Sort Columns: []
+Storage Desc Params:
+ serialization.format 1
+PREHOOK: query: desc formatted analyze_srcpart partition (ds='2008-04-09', hr=12)
PREHOOK: type: DESCTABLE
-POSTHOOK: query: desc extended analyze_srcpart partition (ds='2008-04-09', hr=12)
+POSTHOOK: query: desc formatted analyze_srcpart partition (ds='2008-04-09', hr=12)
POSTHOOK: type: DESCTABLE
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
@@ -248,9 +361,34 @@
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-key string default
-value string default
-ds string
-hr string
+# col_name data_type comment
+key string default
+value string default
+
+# Partition Information
+# col_name data_type comment
+
+ds string None
+hr string None
+
+# Detailed Partition Information
+Partition Value: [2008-04-09, 12]
+Database: default
+Table: analyze_srcpart
#### A masked pattern was here ####
+Protect Mode: None
+#### A masked pattern was here ####
+Partition Parameters:
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+Compressed: No
+Num Buckets: -1
+Bucket Columns: []
+Sort Columns: []
+Storage Desc Params:
+ serialization.format 1
Index: ql/src/test/results/clientpositive/router_join_ppr.q.out
===================================================================
--- ql/src/test/results/clientpositive/router_join_ppr.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/router_join_ppr.q.out (working copy)
@@ -90,9 +90,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -104,9 +109,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src
@@ -125,10 +135,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -140,10 +155,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
@@ -162,10 +182,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -177,10 +202,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
@@ -199,10 +229,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -214,10 +249,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
@@ -236,10 +276,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -251,10 +296,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
@@ -440,9 +490,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -454,9 +509,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src
@@ -475,10 +535,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -490,10 +555,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
@@ -512,10 +582,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -527,10 +602,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
@@ -711,9 +791,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -725,9 +810,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src
@@ -746,10 +836,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -761,10 +856,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
@@ -783,10 +883,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -798,10 +903,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
@@ -982,9 +1092,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -996,9 +1111,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src
@@ -1017,10 +1137,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1032,10 +1157,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
@@ -1054,10 +1184,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1069,10 +1204,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
@@ -1091,10 +1231,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1106,10 +1251,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
@@ -1128,10 +1278,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1143,10 +1298,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
Index: ql/src/test/results/clientpositive/bucketcontext_1.q.out
===================================================================
--- ql/src/test/results/clientpositive/bucketcontext_1.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/bucketcontext_1.q.out (working copy)
@@ -169,10 +169,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.bucket_big
+ numFiles 4
+ numPartitions 2
+ numRows 0
partition_columns ds
+ rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -186,10 +191,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.bucket_big
+ numFiles 8
+ numPartitions 2
+ numRows 0
partition_columns ds
+ rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 11624
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.bucket_big
@@ -209,10 +219,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.bucket_big
+ numFiles 4
+ numPartitions 2
+ numRows 0
partition_columns ds
+ rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -226,10 +241,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.bucket_big
+ numFiles 8
+ numPartitions 2
+ numRows 0
partition_columns ds
+ rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 11624
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.bucket_big
@@ -386,10 +406,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.bucket_big
+ numFiles 4
+ numPartitions 2
+ numRows 0
partition_columns ds
+ rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -403,10 +428,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.bucket_big
+ numFiles 8
+ numPartitions 2
+ numRows 0
partition_columns ds
+ rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 11624
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.bucket_big
@@ -426,10 +456,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.bucket_big
+ numFiles 4
+ numPartitions 2
+ numRows 0
partition_columns ds
+ rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -443,10 +478,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.bucket_big
+ numFiles 8
+ numPartitions 2
+ numRows 0
partition_columns ds
+ rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 11624
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.bucket_big
Index: ql/src/test/results/clientpositive/bucket1.q.out
===================================================================
--- ql/src/test/results/clientpositive/bucket1.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/bucket1.q.out (working copy)
@@ -60,9 +60,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -74,9 +79,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src
Index: ql/src/test/results/clientpositive/input42.q.out
===================================================================
--- ql/src/test/results/clientpositive/input42.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/input42.q.out (working copy)
@@ -65,10 +65,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -80,10 +85,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
@@ -102,10 +112,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -117,10 +132,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
@@ -1232,10 +1252,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1247,10 +1272,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
@@ -1269,10 +1299,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1284,10 +1319,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
@@ -1777,10 +1817,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1792,10 +1837,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
@@ -1814,10 +1864,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1829,10 +1884,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
Index: ql/src/test/results/clientpositive/stats9.q.out
===================================================================
--- ql/src/test/results/clientpositive/stats9.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/stats9.q.out (working copy)
@@ -48,13 +48,39 @@
POSTHOOK: Output: default@analyze_srcbucket
POSTHOOK: Lineage: analyze_srcbucket.key SIMPLE [(srcbucket)srcbucket.FieldSchema(name:key, type:int, comment:null), ]
POSTHOOK: Lineage: analyze_srcbucket.value SIMPLE [(srcbucket)srcbucket.FieldSchema(name:value, type:string, comment:null), ]
-PREHOOK: query: describe extended analyze_srcbucket
+PREHOOK: query: describe formatted analyze_srcbucket
PREHOOK: type: DESCTABLE
-POSTHOOK: query: describe extended analyze_srcbucket
+POSTHOOK: query: describe formatted analyze_srcbucket
POSTHOOK: type: DESCTABLE
POSTHOOK: Lineage: analyze_srcbucket.key SIMPLE [(srcbucket)srcbucket.FieldSchema(name:key, type:int, comment:null), ]
POSTHOOK: Lineage: analyze_srcbucket.value SIMPLE [(srcbucket)srcbucket.FieldSchema(name:value, type:string, comment:null), ]
-key int
-value string
+# col_name data_type comment
+key int None
+value string None
+
+# Detailed Table Information
+Database: default
#### A masked pattern was here ####
+Protect Mode: None
+Retention: 0
+#### A masked pattern was here ####
+Table Type: MANAGED_TABLE
+Table Parameters:
+ numFiles 1
+ numPartitions 0
+ numRows 1000
+ rawDataSize 10603
+ totalSize 11603
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+Compressed: No
+Num Buckets: 2
+Bucket Columns: [key]
+Sort Columns: []
+Storage Desc Params:
+ serialization.format 1
Index: ql/src/test/results/clientpositive/stats4.q.out
===================================================================
--- ql/src/test/results/clientpositive/stats4.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/stats4.q.out (working copy)
@@ -2315,9 +2315,9 @@
400 val_400 2008-12-31 12
200 val_200 2008-12-31 12
97 val_97 2008-12-31 12
-PREHOOK: query: describe extended nzhang_part1 partition(ds='2008-04-08',hr=11)
+PREHOOK: query: describe formatted nzhang_part1 partition(ds='2008-04-08',hr=11)
PREHOOK: type: DESCTABLE
-POSTHOOK: query: describe extended nzhang_part1 partition(ds='2008-04-08',hr=11)
+POSTHOOK: query: describe formatted nzhang_part1 partition(ds='2008-04-08',hr=11)
POSTHOOK: type: DESCTABLE
POSTHOOK: Lineage: nzhang_part1 PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: nzhang_part1 PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
@@ -2327,15 +2327,44 @@
POSTHOOK: Lineage: nzhang_part2 PARTITION(ds=2008-12-31,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: nzhang_part2 PARTITION(ds=2008-12-31,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: nzhang_part2 PARTITION(ds=2008-12-31,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-key string default
-value string default
-ds string
-hr string
+# col_name data_type comment
+key string default
+value string default
+
+# Partition Information
+# col_name data_type comment
+
+ds string None
+hr string None
+
+# Detailed Partition Information
+Partition Value: [2008-04-08, 11]
+Database: default
+Table: nzhang_part1
#### A masked pattern was here ####
-PREHOOK: query: describe extended nzhang_part1 partition(ds='2008-04-08',hr=12)
+Protect Mode: None
+#### A masked pattern was here ####
+Partition Parameters:
+ numFiles 1
+ numRows 500
+ rawDataSize 5312
+ totalSize 5812
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+Compressed: No
+Num Buckets: -1
+Bucket Columns: []
+Sort Columns: []
+Storage Desc Params:
+ serialization.format 1
+PREHOOK: query: describe formatted nzhang_part1 partition(ds='2008-04-08',hr=12)
PREHOOK: type: DESCTABLE
-POSTHOOK: query: describe extended nzhang_part1 partition(ds='2008-04-08',hr=12)
+POSTHOOK: query: describe formatted nzhang_part1 partition(ds='2008-04-08',hr=12)
POSTHOOK: type: DESCTABLE
POSTHOOK: Lineage: nzhang_part1 PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: nzhang_part1 PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
@@ -2345,15 +2374,44 @@
POSTHOOK: Lineage: nzhang_part2 PARTITION(ds=2008-12-31,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: nzhang_part2 PARTITION(ds=2008-12-31,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: nzhang_part2 PARTITION(ds=2008-12-31,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-key string default
-value string default
-ds string
-hr string
+# col_name data_type comment
+key string default
+value string default
+
+# Partition Information
+# col_name data_type comment
+
+ds string None
+hr string None
+
+# Detailed Partition Information
+Partition Value: [2008-04-08, 12]
+Database: default
+Table: nzhang_part1
#### A masked pattern was here ####
-PREHOOK: query: describe extended nzhang_part2 partition(ds='2008-12-31',hr=11)
+Protect Mode: None
+#### A masked pattern was here ####
+Partition Parameters:
+ numFiles 1
+ numRows 500
+ rawDataSize 5312
+ totalSize 5812
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+Compressed: No
+Num Buckets: -1
+Bucket Columns: []
+Sort Columns: []
+Storage Desc Params:
+ serialization.format 1
+PREHOOK: query: describe formatted nzhang_part2 partition(ds='2008-12-31',hr=11)
PREHOOK: type: DESCTABLE
-POSTHOOK: query: describe extended nzhang_part2 partition(ds='2008-12-31',hr=11)
+POSTHOOK: query: describe formatted nzhang_part2 partition(ds='2008-12-31',hr=11)
POSTHOOK: type: DESCTABLE
POSTHOOK: Lineage: nzhang_part1 PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: nzhang_part1 PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
@@ -2363,15 +2421,44 @@
POSTHOOK: Lineage: nzhang_part2 PARTITION(ds=2008-12-31,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: nzhang_part2 PARTITION(ds=2008-12-31,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: nzhang_part2 PARTITION(ds=2008-12-31,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-key string default
-value string default
-ds string
-hr string
+# col_name data_type comment
+key string default
+value string default
+
+# Partition Information
+# col_name data_type comment
+
+ds string None
+hr string None
+
+# Detailed Partition Information
+Partition Value: [2008-12-31, 11]
+Database: default
+Table: nzhang_part2
#### A masked pattern was here ####
-PREHOOK: query: describe extended nzhang_part2 partition(ds='2008-12-31',hr=12)
+Protect Mode: None
+#### A masked pattern was here ####
+Partition Parameters:
+ numFiles 1
+ numRows 500
+ rawDataSize 5312
+ totalSize 5812
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+Compressed: No
+Num Buckets: -1
+Bucket Columns: []
+Sort Columns: []
+Storage Desc Params:
+ serialization.format 1
+PREHOOK: query: describe formatted nzhang_part2 partition(ds='2008-12-31',hr=12)
PREHOOK: type: DESCTABLE
-POSTHOOK: query: describe extended nzhang_part2 partition(ds='2008-12-31',hr=12)
+POSTHOOK: query: describe formatted nzhang_part2 partition(ds='2008-12-31',hr=12)
POSTHOOK: type: DESCTABLE
POSTHOOK: Lineage: nzhang_part1 PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: nzhang_part1 PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
@@ -2381,15 +2468,44 @@
POSTHOOK: Lineage: nzhang_part2 PARTITION(ds=2008-12-31,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: nzhang_part2 PARTITION(ds=2008-12-31,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: nzhang_part2 PARTITION(ds=2008-12-31,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-key string default
-value string default
-ds string
-hr string
+# col_name data_type comment
+key string default
+value string default
+
+# Partition Information
+# col_name data_type comment
+
+ds string None
+hr string None
+
+# Detailed Partition Information
+Partition Value: [2008-12-31, 12]
+Database: default
+Table: nzhang_part2
#### A masked pattern was here ####
-PREHOOK: query: describe extended nzhang_part1
+Protect Mode: None
+#### A masked pattern was here ####
+Partition Parameters:
+ numFiles 1
+ numRows 500
+ rawDataSize 5312
+ totalSize 5812
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+Compressed: No
+Num Buckets: -1
+Bucket Columns: []
+Sort Columns: []
+Storage Desc Params:
+ serialization.format 1
+PREHOOK: query: describe formatted nzhang_part1
PREHOOK: type: DESCTABLE
-POSTHOOK: query: describe extended nzhang_part1
+POSTHOOK: query: describe formatted nzhang_part1
POSTHOOK: type: DESCTABLE
POSTHOOK: Lineage: nzhang_part1 PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: nzhang_part1 PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
@@ -2399,15 +2515,45 @@
POSTHOOK: Lineage: nzhang_part2 PARTITION(ds=2008-12-31,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: nzhang_part2 PARTITION(ds=2008-12-31,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: nzhang_part2 PARTITION(ds=2008-12-31,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-key string default
-value string default
-ds string
-hr string
+# col_name data_type comment
+key string default
+value string default
+
+# Partition Information
+# col_name data_type comment
+
+ds string None
+hr string None
+
+# Detailed Table Information
+Database: default
#### A masked pattern was here ####
-PREHOOK: query: describe extended nzhang_part2
+Protect Mode: None
+Retention: 0
+#### A masked pattern was here ####
+Table Type: MANAGED_TABLE
+Table Parameters:
+ numFiles 2
+ numPartitions 2
+ numRows 1000
+ rawDataSize 10624
+ totalSize 11624
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+Compressed: No
+Num Buckets: -1
+Bucket Columns: []
+Sort Columns: []
+Storage Desc Params:
+ serialization.format 1
+PREHOOK: query: describe formatted nzhang_part2
PREHOOK: type: DESCTABLE
-POSTHOOK: query: describe extended nzhang_part2
+POSTHOOK: query: describe formatted nzhang_part2
POSTHOOK: type: DESCTABLE
POSTHOOK: Lineage: nzhang_part1 PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: nzhang_part1 PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
@@ -2417,12 +2563,42 @@
POSTHOOK: Lineage: nzhang_part2 PARTITION(ds=2008-12-31,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: nzhang_part2 PARTITION(ds=2008-12-31,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: nzhang_part2 PARTITION(ds=2008-12-31,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-key string default
-value string default
-ds string
-hr string
+# col_name data_type comment
+key string default
+value string default
+
+# Partition Information
+# col_name data_type comment
+
+ds string None
+hr string None
+
+# Detailed Table Information
+Database: default
#### A masked pattern was here ####
+Protect Mode: None
+Retention: 0
+#### A masked pattern was here ####
+Table Type: MANAGED_TABLE
+Table Parameters:
+ numFiles 2
+ numPartitions 2
+ numRows 1000
+ rawDataSize 10624
+ totalSize 11624
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+Compressed: No
+Num Buckets: -1
+Bucket Columns: []
+Sort Columns: []
+Storage Desc Params:
+ serialization.format 1
PREHOOK: query: drop table nzhang_part1
PREHOOK: type: DROPTABLE
PREHOOK: Input: default@nzhang_part1
Index: ql/src/test/results/clientpositive/join34.q.out
===================================================================
--- ql/src/test/results/clientpositive/join34.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/join34.q.out (working copy)
@@ -221,9 +221,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -235,9 +240,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src
Index: ql/src/test/results/clientpositive/bucketmapjoin1.q.out
===================================================================
--- ql/src/test/results/clientpositive/bucketmapjoin1.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/bucketmapjoin1.q.out (working copy)
@@ -466,9 +466,14 @@
columns.types int:string
#### A masked pattern was here ####
name default.srcbucket_mapjoin
+ numFiles 2
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct srcbucket_mapjoin { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 2750
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -481,9 +486,14 @@
columns.types int:string
#### A masked pattern was here ####
name default.srcbucket_mapjoin
+ numFiles 2
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct srcbucket_mapjoin { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 2750
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcbucket_mapjoin
@@ -953,10 +963,15 @@
columns.types int:string
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part
+ numFiles 4
+ numPartitions 1
+ numRows 0
partition_columns ds
+ rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -969,10 +984,15 @@
columns.types int:string
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part
+ numFiles 4
+ numPartitions 1
+ numRows 0
partition_columns ds
+ rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcbucket_mapjoin_part
Index: ql/src/test/results/clientpositive/louter_join_ppr.q.out
===================================================================
--- ql/src/test/results/clientpositive/louter_join_ppr.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/louter_join_ppr.q.out (working copy)
@@ -88,9 +88,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -102,9 +107,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src
@@ -123,10 +133,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -138,10 +153,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
@@ -160,10 +180,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -175,10 +200,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
@@ -359,9 +389,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -373,9 +408,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src
@@ -394,10 +434,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -409,10 +454,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
@@ -431,10 +481,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -446,10 +501,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
@@ -468,10 +528,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -483,10 +548,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
@@ -505,10 +575,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -520,10 +595,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
@@ -711,9 +791,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -725,9 +810,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src
@@ -746,10 +836,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -761,10 +856,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
@@ -783,10 +883,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -798,10 +903,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
@@ -820,10 +930,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -835,10 +950,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
@@ -857,10 +977,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -872,10 +997,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
@@ -1060,9 +1190,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1074,9 +1209,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src
@@ -1095,10 +1235,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1110,10 +1255,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
@@ -1132,10 +1282,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1147,10 +1302,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
Index: ql/src/test/results/clientpositive/udf_java_method.q.out
===================================================================
--- ql/src/test/results/clientpositive/udf_java_method.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/udf_java_method.q.out (working copy)
@@ -100,9 +100,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -114,9 +119,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src
Index: ql/src/test/results/clientpositive/sample5.q.out
===================================================================
--- ql/src/test/results/clientpositive/sample5.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/sample5.q.out (working copy)
@@ -86,9 +86,14 @@
columns.types int:string
#### A masked pattern was here ####
name default.srcbucket
+ numFiles 2
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct srcbucket { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 11603
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -101,9 +106,14 @@
columns.types int:string
#### A masked pattern was here ####
name default.srcbucket
+ numFiles 2
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct srcbucket { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 11603
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcbucket
Index: ql/src/test/results/clientpositive/udf_explode.q.out
===================================================================
--- ql/src/test/results/clientpositive/udf_explode.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/udf_explode.q.out (working copy)
@@ -67,9 +67,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -81,9 +86,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src
@@ -143,9 +153,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -157,9 +172,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src
@@ -365,9 +385,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -379,9 +404,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src
@@ -443,9 +473,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -457,9 +492,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src
Index: ql/src/test/results/clientpositive/rand_partitionpruner2.q.out
===================================================================
--- ql/src/test/results/clientpositive/rand_partitionpruner2.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/rand_partitionpruner2.q.out (working copy)
@@ -94,10 +94,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -109,10 +114,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
@@ -131,10 +141,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -146,10 +161,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
Index: ql/src/test/results/clientpositive/bucketcontext_3.q.out
===================================================================
--- ql/src/test/results/clientpositive/bucketcontext_3.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/bucketcontext_3.q.out (working copy)
@@ -157,10 +157,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.bucket_big
+ numFiles 4
+ numPartitions 1
+ numRows 0
partition_columns ds
+ rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -174,10 +179,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.bucket_big
+ numFiles 4
+ numPartitions 1
+ numRows 0
partition_columns ds
+ rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.bucket_big
@@ -365,10 +375,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.bucket_big
+ numFiles 4
+ numPartitions 1
+ numRows 0
partition_columns ds
+ rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -382,10 +397,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.bucket_big
+ numFiles 4
+ numPartitions 1
+ numRows 0
partition_columns ds
+ rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.bucket_big
Index: ql/src/test/results/clientpositive/bucket3.q.out
===================================================================
--- ql/src/test/results/clientpositive/bucket3.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/bucket3.q.out (working copy)
@@ -60,9 +60,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -74,9 +79,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src
Index: ql/src/test/results/clientpositive/groupby_ppr_multi_distinct.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby_ppr_multi_distinct.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/groupby_ppr_multi_distinct.q.out (working copy)
@@ -71,10 +71,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -86,10 +91,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
@@ -108,10 +118,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -123,10 +138,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
Index: ql/src/test/results/clientpositive/input4.q.out
===================================================================
--- ql/src/test/results/clientpositive/input4.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/input4.q.out (working copy)
@@ -15,6 +15,7 @@
STAGE DEPENDENCIES:
Stage-0 is a root stage
Stage-1 depends on stages: Stage-0
+ Stage-2 depends on stages: Stage-1
STAGE PLANS:
Stage: Stage-0
@@ -31,7 +32,10 @@
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.input4
+ Stage: Stage-2
+ Stats-Aggr Operator
+
PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE INPUT4
PREHOOK: type: LOAD
PREHOOK: Output: default@input4
Index: ql/src/test/results/clientpositive/stats6.q.out
===================================================================
--- ql/src/test/results/clientpositive/stats6.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/stats6.q.out (working copy)
@@ -64,9 +64,9 @@
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: describe extended analyze_srcpart PARTITION(ds='2008-04-08',hr=11)
+PREHOOK: query: describe formatted analyze_srcpart PARTITION(ds='2008-04-08',hr=11)
PREHOOK: type: DESCTABLE
-POSTHOOK: query: describe extended analyze_srcpart PARTITION(ds='2008-04-08',hr=11)
+POSTHOOK: query: describe formatted analyze_srcpart PARTITION(ds='2008-04-08',hr=11)
POSTHOOK: type: DESCTABLE
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
@@ -76,15 +76,44 @@
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-key string default
-value string default
-ds string
-hr string
+# col_name data_type comment
+key string default
+value string default
+
+# Partition Information
+# col_name data_type comment
+
+ds string None
+hr string None
+
+# Detailed Partition Information
+Partition Value: [2008-04-08, 11]
+Database: default
+Table: analyze_srcpart
#### A masked pattern was here ####
-PREHOOK: query: describe extended analyze_srcpart PARTITION(ds='2008-04-08',hr=12)
+Protect Mode: None
+#### A masked pattern was here ####
+Partition Parameters:
+ numFiles 1
+ numRows 500
+ rawDataSize 5312
+ totalSize 5812
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+Compressed: No
+Num Buckets: -1
+Bucket Columns: []
+Sort Columns: []
+Storage Desc Params:
+ serialization.format 1
+PREHOOK: query: describe formatted analyze_srcpart PARTITION(ds='2008-04-08',hr=12)
PREHOOK: type: DESCTABLE
-POSTHOOK: query: describe extended analyze_srcpart PARTITION(ds='2008-04-08',hr=12)
+POSTHOOK: query: describe formatted analyze_srcpart PARTITION(ds='2008-04-08',hr=12)
POSTHOOK: type: DESCTABLE
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
@@ -94,15 +123,44 @@
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-key string default
-value string default
-ds string
-hr string
+# col_name data_type comment
+key string default
+value string default
+
+# Partition Information
+# col_name data_type comment
+
+ds string None
+hr string None
+
+# Detailed Partition Information
+Partition Value: [2008-04-08, 12]
+Database: default
+Table: analyze_srcpart
#### A masked pattern was here ####
-PREHOOK: query: describe extended analyze_srcpart PARTITION(ds='2008-04-09',hr=11)
+Protect Mode: None
+#### A masked pattern was here ####
+Partition Parameters:
+ numFiles 1
+ numRows 500
+ rawDataSize 5312
+ totalSize 5812
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+Compressed: No
+Num Buckets: -1
+Bucket Columns: []
+Sort Columns: []
+Storage Desc Params:
+ serialization.format 1
+PREHOOK: query: describe formatted analyze_srcpart PARTITION(ds='2008-04-09',hr=11)
PREHOOK: type: DESCTABLE
-POSTHOOK: query: describe extended analyze_srcpart PARTITION(ds='2008-04-09',hr=11)
+POSTHOOK: query: describe formatted analyze_srcpart PARTITION(ds='2008-04-09',hr=11)
POSTHOOK: type: DESCTABLE
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
@@ -112,15 +170,40 @@
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-key string default
-value string default
-ds string
-hr string
+# col_name data_type comment
+key string default
+value string default
+
+# Partition Information
+# col_name data_type comment
+
+ds string None
+hr string None
+
+# Detailed Partition Information
+Partition Value: [2008-04-09, 11]
+Database: default
+Table: analyze_srcpart
#### A masked pattern was here ####
-PREHOOK: query: describe extended analyze_srcpart PARTITION(ds='2008-04-09',hr=12)
+Protect Mode: None
+#### A masked pattern was here ####
+Partition Parameters:
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+Compressed: No
+Num Buckets: -1
+Bucket Columns: []
+Sort Columns: []
+Storage Desc Params:
+ serialization.format 1
+PREHOOK: query: describe formatted analyze_srcpart PARTITION(ds='2008-04-09',hr=12)
PREHOOK: type: DESCTABLE
-POSTHOOK: query: describe extended analyze_srcpart PARTITION(ds='2008-04-09',hr=12)
+POSTHOOK: query: describe formatted analyze_srcpart PARTITION(ds='2008-04-09',hr=12)
POSTHOOK: type: DESCTABLE
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
@@ -130,15 +213,40 @@
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-key string default
-value string default
-ds string
-hr string
+# col_name data_type comment
+key string default
+value string default
+
+# Partition Information
+# col_name data_type comment
+
+ds string None
+hr string None
+
+# Detailed Partition Information
+Partition Value: [2008-04-09, 12]
+Database: default
+Table: analyze_srcpart
#### A masked pattern was here ####
-PREHOOK: query: describe extended analyze_srcpart
+Protect Mode: None
+#### A masked pattern was here ####
+Partition Parameters:
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+Compressed: No
+Num Buckets: -1
+Bucket Columns: []
+Sort Columns: []
+Storage Desc Params:
+ serialization.format 1
+PREHOOK: query: describe formatted analyze_srcpart
PREHOOK: type: DESCTABLE
-POSTHOOK: query: describe extended analyze_srcpart
+POSTHOOK: query: describe formatted analyze_srcpart
POSTHOOK: type: DESCTABLE
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
@@ -148,9 +256,39 @@
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-key string default
-value string default
-ds string
-hr string
+# col_name data_type comment
+key string default
+value string default
+
+# Partition Information
+# col_name data_type comment
+
+ds string None
+hr string None
+
+# Detailed Table Information
+Database: default
#### A masked pattern was here ####
+Protect Mode: None
+Retention: 0
+#### A masked pattern was here ####
+Table Type: MANAGED_TABLE
+Table Parameters:
+ numFiles 2
+ numPartitions 2
+ numRows 1000
+ rawDataSize 10624
+ totalSize 11624
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+Compressed: No
+Num Buckets: -1
+Bucket Columns: []
+Sort Columns: []
+Storage Desc Params:
+ serialization.format 1
Index: ql/src/test/results/clientpositive/bucketmapjoin3.q.out
===================================================================
--- ql/src/test/results/clientpositive/bucketmapjoin3.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/bucketmapjoin3.q.out (working copy)
@@ -214,10 +214,15 @@
columns.types int:string
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part_2
+ numFiles 2
+ numPartitions 1
+ numRows 0
partition_columns ds
+ rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 3062
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -230,10 +235,15 @@
columns.types int:string
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part_2
+ numFiles 2
+ numPartitions 1
+ numRows 0
partition_columns ds
+ rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 3062
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcbucket_mapjoin_part_2
@@ -701,10 +711,15 @@
columns.types int:string
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part
+ numFiles 4
+ numPartitions 1
+ numRows 0
partition_columns ds
+ rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -717,10 +732,15 @@
columns.types int:string
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part
+ numFiles 4
+ numPartitions 1
+ numRows 0
partition_columns ds
+ rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcbucket_mapjoin_part
Index: ql/src/test/results/clientpositive/stats1.q.out
===================================================================
--- ql/src/test/results/clientpositive/stats1.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/stats1.q.out (working copy)
@@ -185,13 +185,89 @@
66 val_66
98 val_98
tst1 500
-PREHOOK: query: DESCRIBE EXTENDED tmptable
+PREHOOK: query: DESCRIBE FORMATTED tmptable
PREHOOK: type: DESCTABLE
-POSTHOOK: query: DESCRIBE EXTENDED tmptable
+POSTHOOK: query: DESCRIBE FORMATTED tmptable
POSTHOOK: type: DESCTABLE
POSTHOOK: Lineage: tmptable.key EXPRESSION [(src1)s2.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: tmptable.value EXPRESSION [(src)s1.null, (src1)s2.FieldSchema(name:value, type:string, comment:default), ]
-key string
-value string
+# col_name data_type comment
+key string None
+value string None
+
+# Detailed Table Information
+Database: default
#### A masked pattern was here ####
+Protect Mode: None
+Retention: 0
+#### A masked pattern was here ####
+Table Type: MANAGED_TABLE
+Table Parameters:
+ numFiles 2
+ numPartitions 0
+ numRows 26
+ rawDataSize 199
+ totalSize 225
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+Compressed: No
+Num Buckets: -1
+Bucket Columns: []
+Sort Columns: []
+Storage Desc Params:
+ serialization.format 1
+PREHOOK: query: -- Load a file into a existing table
+-- Some stats (numFiles, totalSize) should be updated correctly
+-- Some other stats (numRows, rawDataSize) should be cleared
+load data local inpath '../data/files/srcbucket20.txt' INTO TABLE tmptable
+PREHOOK: type: LOAD
+PREHOOK: Output: default@tmptable
+POSTHOOK: query: -- Load a file into a existing table
+-- Some stats (numFiles, totalSize) should be updated correctly
+-- Some other stats (numRows, rawDataSize) should be cleared
+load data local inpath '../data/files/srcbucket20.txt' INTO TABLE tmptable
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@tmptable
+POSTHOOK: Lineage: tmptable.key EXPRESSION [(src1)s2.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: tmptable.value EXPRESSION [(src)s1.null, (src1)s2.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: DESCRIBE FORMATTED tmptable
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: DESCRIBE FORMATTED tmptable
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Lineage: tmptable.key EXPRESSION [(src1)s2.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: tmptable.value EXPRESSION [(src)s1.null, (src1)s2.FieldSchema(name:value, type:string, comment:default), ]
+# col_name data_type comment
+
+key string None
+value string None
+
+# Detailed Table Information
+Database: default
+#### A masked pattern was here ####
+Protect Mode: None
+Retention: 0
+#### A masked pattern was here ####
+Table Type: MANAGED_TABLE
+Table Parameters:
+ numFiles 3
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
+ totalSize 1583
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+Compressed: No
+Num Buckets: -1
+Bucket Columns: []
+Sort Columns: []
+Storage Desc Params:
+ serialization.format 1
Index: ql/src/test/results/clientpositive/ctas.q.out
===================================================================
--- ql/src/test/results/clientpositive/ctas.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/ctas.q.out (working copy)
@@ -774,9 +774,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -788,9 +793,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src
Index: ql/src/test/results/clientpositive/outer_join_ppr.q.out
===================================================================
--- ql/src/test/results/clientpositive/outer_join_ppr.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/outer_join_ppr.q.out (working copy)
@@ -80,9 +80,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -94,9 +99,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src
@@ -115,10 +125,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -130,10 +145,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
@@ -152,10 +172,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -167,10 +192,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
@@ -189,10 +219,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -204,10 +239,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
@@ -226,10 +266,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -241,10 +286,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
@@ -422,9 +472,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -436,9 +491,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src
@@ -457,10 +517,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -472,10 +537,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
@@ -494,10 +564,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -509,10 +584,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
@@ -531,10 +611,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -546,10 +631,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
@@ -568,10 +658,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -583,10 +678,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
Index: ql/src/test/results/clientpositive/udf_reflect.q.out
===================================================================
--- ql/src/test/results/clientpositive/udf_reflect.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/udf_reflect.q.out (working copy)
@@ -96,9 +96,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -110,9 +115,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src
Index: ql/src/test/results/clientpositive/bucketmapjoin_negative2.q.out
===================================================================
--- ql/src/test/results/clientpositive/bucketmapjoin_negative2.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/bucketmapjoin_negative2.q.out (working copy)
@@ -185,9 +185,14 @@
columns.types int:string
#### A masked pattern was here ####
name default.srcbucket_mapjoin
+ numFiles 2
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct srcbucket_mapjoin { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 2750
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -200,9 +205,14 @@
columns.types int:string
#### A masked pattern was here ####
name default.srcbucket_mapjoin
+ numFiles 2
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct srcbucket_mapjoin { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 2750
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcbucket_mapjoin
Index: ql/src/test/results/clientpositive/sample7.q.out
===================================================================
--- ql/src/test/results/clientpositive/sample7.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/sample7.q.out (working copy)
@@ -93,9 +93,14 @@
columns.types int:string
#### A masked pattern was here ####
name default.srcbucket
+ numFiles 2
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct srcbucket { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 11603
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -108,9 +113,14 @@
columns.types int:string
#### A masked pattern was here ####
name default.srcbucket
+ numFiles 2
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct srcbucket { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 11603
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcbucket
Index: ql/src/test/results/clientpositive/transform_ppr1.q.out
===================================================================
--- ql/src/test/results/clientpositive/transform_ppr1.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/transform_ppr1.q.out (working copy)
@@ -89,10 +89,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -104,10 +109,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
@@ -126,10 +136,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -141,10 +156,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
@@ -163,10 +183,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -178,10 +203,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
@@ -200,10 +230,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -215,10 +250,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
Index: ql/src/test/results/clientpositive/regexp_extract.q.out
===================================================================
--- ql/src/test/results/clientpositive/regexp_extract.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/regexp_extract.q.out (working copy)
@@ -87,9 +87,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -101,9 +106,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src
@@ -333,9 +343,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -347,9 +362,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src
Index: ql/src/test/results/clientpositive/bucket_map_join_2.q.out
===================================================================
--- ql/src/test/results/clientpositive/bucket_map_join_2.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/bucket_map_join_2.q.out (working copy)
@@ -136,9 +136,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.table1
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct table1 { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 20
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -152,9 +157,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.table1
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct table1 { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 20
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.table1
@@ -232,7 +242,6 @@
Fetch Operator
limit: -1
-
PREHOOK: query: select /*+ mapjoin(b) */ count(*) from table1 a join table2 b on a.key=b.key and a.value=b.value
PREHOOK: type: QUERY
PREHOOK: Input: default@table1
Index: ql/src/test/results/clientpositive/sample2.q.out
===================================================================
--- ql/src/test/results/clientpositive/sample2.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/sample2.q.out (working copy)
@@ -88,9 +88,14 @@
columns.types int:string
#### A masked pattern was here ####
name default.srcbucket
+ numFiles 2
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct srcbucket { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 11603
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -103,9 +108,14 @@
columns.types int:string
#### A masked pattern was here ####
name default.srcbucket
+ numFiles 2
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct srcbucket { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 11603
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcbucket
Index: ql/src/test/results/clientpositive/disable_merge_for_bucketing.q.out
===================================================================
--- ql/src/test/results/clientpositive/disable_merge_for_bucketing.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/disable_merge_for_bucketing.q.out (working copy)
@@ -60,9 +60,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -74,9 +79,14 @@
columns.types string:string
#### A masked pattern was here ####
name default.src
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src
Index: ql/src/test/results/clientpositive/stats11.q.out
===================================================================
--- ql/src/test/results/clientpositive/stats11.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/stats11.q.out (working copy)
@@ -20,6 +20,41 @@
POSTHOOK: query: CREATE TABLE srcbucket_mapjoin_part (key int, value string) partitioned by (ds string) CLUSTERED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@srcbucket_mapjoin_part
+PREHOOK: query: explain
+load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
+PREHOOK: type: LOAD
+POSTHOOK: query: explain
+load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
+POSTHOOK: type: LOAD
+ABSTRACT SYNTAX TREE:
+ (TOK_LOAD '../data/files/srcbucket20.txt' (TOK_TAB (TOK_TABNAME srcbucket_mapjoin_part) (TOK_PARTSPEC (TOK_PARTVAL ds '2008-04-08'))) local)
+
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+ Stage-1 depends on stages: Stage-0
+ Stage-2 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-0
+ Copy
+#### A masked pattern was here ####
+
+ Stage: Stage-1
+ Move Operator
+ tables:
+ partition:
+ ds 2008-04-08
+ replace: false
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.srcbucket_mapjoin_part
+
+ Stage: Stage-2
+ Stats-Aggr Operator
+
+
PREHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part
@@ -27,24 +62,176 @@
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part
POSTHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08
+PREHOOK: query: desc formatted srcbucket_mapjoin_part partition(ds='2008-04-08')
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: desc formatted srcbucket_mapjoin_part partition(ds='2008-04-08')
+POSTHOOK: type: DESCTABLE
+# col_name data_type comment
+
+key int None
+value string None
+
+# Partition Information
+# col_name data_type comment
+
+ds string None
+
+# Detailed Partition Information
+Partition Value: [2008-04-08]
+Database: default
+Table: srcbucket_mapjoin_part
+#### A masked pattern was here ####
+Protect Mode: None
+#### A masked pattern was here ####
+Partition Parameters:
+ numFiles 1
+ numRows 0
+ rawDataSize 0
+ totalSize 1358
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+Compressed: No
+Num Buckets: 4
+Bucket Columns: [key]
+Sort Columns: []
+Storage Desc Params:
+ serialization.format 1
PREHOOK: query: load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08
POSTHOOK: query: load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08
+PREHOOK: query: desc formatted srcbucket_mapjoin_part partition(ds='2008-04-08')
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: desc formatted srcbucket_mapjoin_part partition(ds='2008-04-08')
+POSTHOOK: type: DESCTABLE
+# col_name data_type comment
+
+key int None
+value string None
+
+# Partition Information
+# col_name data_type comment
+
+ds string None
+
+# Detailed Partition Information
+Partition Value: [2008-04-08]
+Database: default
+Table: srcbucket_mapjoin_part
+#### A masked pattern was here ####
+Protect Mode: None
+#### A masked pattern was here ####
+Partition Parameters:
+ numFiles 2
+ numRows 0
+ rawDataSize 0
+ totalSize 2750
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+Compressed: No
+Num Buckets: 4
+Bucket Columns: [key]
+Sort Columns: []
+Storage Desc Params:
+ serialization.format 1
PREHOOK: query: load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08
POSTHOOK: query: load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08
+PREHOOK: query: desc formatted srcbucket_mapjoin_part partition(ds='2008-04-08')
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: desc formatted srcbucket_mapjoin_part partition(ds='2008-04-08')
+POSTHOOK: type: DESCTABLE
+# col_name data_type comment
+
+key int None
+value string None
+
+# Partition Information
+# col_name data_type comment
+
+ds string None
+
+# Detailed Partition Information
+Partition Value: [2008-04-08]
+Database: default
+Table: srcbucket_mapjoin_part
+#### A masked pattern was here ####
+Protect Mode: None
+#### A masked pattern was here ####
+Partition Parameters:
+ numFiles 3
+ numRows 0
+ rawDataSize 0
+ totalSize 4200
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+Compressed: No
+Num Buckets: 4
+Bucket Columns: [key]
+Sort Columns: []
+Storage Desc Params:
+ serialization.format 1
PREHOOK: query: load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08
POSTHOOK: query: load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08
+PREHOOK: query: desc formatted srcbucket_mapjoin_part partition(ds='2008-04-08')
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: desc formatted srcbucket_mapjoin_part partition(ds='2008-04-08')
+POSTHOOK: type: DESCTABLE
+# col_name data_type comment
+
+key int None
+value string None
+
+# Partition Information
+# col_name data_type comment
+
+ds string None
+
+# Detailed Partition Information
+Partition Value: [2008-04-08]
+Database: default
+Table: srcbucket_mapjoin_part
+#### A masked pattern was here ####
+Protect Mode: None
+#### A masked pattern was here ####
+Partition Parameters:
+ numFiles 4
+ numRows 0
+ rawDataSize 0
+ totalSize 5812
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+Compressed: No
+Num Buckets: 4
+Bucket Columns: [key]
+Sort Columns: []
+Storage Desc Params:
+ serialization.format 1
PREHOOK: query: CREATE TABLE srcbucket_mapjoin_part_2 (key int, value string) partitioned by (ds string) CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
POSTHOOK: query: CREATE TABLE srcbucket_mapjoin_part_2 (key int, value string) partitioned by (ds string) CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
@@ -214,9 +401,14 @@
columns.types int:string
#### A masked pattern was here ####
name default.srcbucket_mapjoin
+ numFiles 2
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct srcbucket_mapjoin { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 2750
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -229,9 +421,14 @@
columns.types int:string
#### A masked pattern was here ####
name default.srcbucket_mapjoin
+ numFiles 2
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
serialization.ddl struct srcbucket_mapjoin { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 2750
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcbucket_mapjoin
@@ -701,10 +898,15 @@
columns.types int:string
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part
+ numFiles 4
+ numPartitions 1
+ numRows 0
partition_columns ds
+ rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -717,10 +919,15 @@
columns.types int:string
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part
+ numFiles 4
+ numPartitions 1
+ numRows 0
partition_columns ds
+ rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcbucket_mapjoin_part
Index: ql/src/test/results/clientpositive/input23.q.out
===================================================================
--- ql/src/test/results/clientpositive/input23.q.out (revision 1370661)
+++ ql/src/test/results/clientpositive/input23.q.out (working copy)
@@ -70,10 +70,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -85,10 +90,15 @@
columns.types string:string
#### A masked pattern was here ####
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 23248
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
Index: ql/src/test/results/compiler/plan/input2.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/input2.q.xml (revision 1370661)
+++ ql/src/test/results/compiler/plan/input2.q.xml (working copy)
@@ -6,7 +6,7 @@
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -2714,6 +2770,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -2748,6 +2808,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -2764,6 +2828,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -2776,6 +2852,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Index: ql/src/test/results/compiler/plan/join3.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/join3.q.xml (revision 1370661)
+++ ql/src/test/results/compiler/plan/join3.q.xml (working copy)
@@ -10,7 +10,7 @@
- Stage-4
+ Stage-5
@@ -38,7 +38,7 @@
- Stage-2
+ Stage-3
@@ -136,7 +136,7 @@
- Stage-3
+ Stage-4
@@ -163,6 +163,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -179,6 +183,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -191,6 +207,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -225,6 +245,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -241,6 +265,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -253,6 +289,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -295,6 +335,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -311,6 +355,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -323,6 +379,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -357,6 +417,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -373,6 +437,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -385,6 +461,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -427,6 +507,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -443,6 +527,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -455,6 +551,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -489,6 +589,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -505,6 +609,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -517,6 +633,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1487,6 +1607,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -1503,6 +1627,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -1515,6 +1651,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1549,6 +1689,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -1565,6 +1709,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -1577,6 +1733,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1912,11 +2072,11 @@
true
-
-
-
-
-
+
+
+
+
+
_col0
Index: ql/src/test/results/compiler/plan/input4.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/input4.q.xml (revision 1370661)
+++ ql/src/test/results/compiler/plan/input4.q.xml (working copy)
@@ -10,7 +10,7 @@
- Stage-4
+ Stage-5
@@ -38,7 +38,7 @@
- Stage-2
+ Stage-3
@@ -136,7 +136,7 @@
- Stage-3
+ Stage-4
@@ -163,6 +163,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -179,6 +183,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -191,6 +207,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -225,6 +245,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -241,6 +265,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -253,6 +289,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1020,6 +1060,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -1036,6 +1080,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -1048,6 +1104,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1082,6 +1142,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -1098,6 +1162,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -1110,6 +1186,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Index: ql/src/test/results/compiler/plan/join5.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/join5.q.xml (revision 1370661)
+++ ql/src/test/results/compiler/plan/join5.q.xml (working copy)
@@ -2,7 +2,7 @@
#### A masked pattern was here ####
- Stage-3
+ Stage-4
@@ -29,6 +29,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -45,6 +49,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -57,6 +73,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -91,6 +111,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -107,6 +131,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -119,6 +155,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -161,6 +201,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -177,6 +221,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -189,6 +245,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -223,6 +283,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -239,6 +303,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -251,6 +327,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1489,6 +1569,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -1505,6 +1589,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -1517,6 +1613,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1551,6 +1651,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -1567,6 +1671,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -1579,6 +1695,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -2227,11 +2347,11 @@
-
-
-
-
-
+
+
+
+
+
_col0
Index: ql/src/test/results/compiler/plan/input6.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/input6.q.xml (revision 1370661)
+++ ql/src/test/results/compiler/plan/input6.q.xml (working copy)
@@ -6,7 +6,7 @@
- Stage-9
+ Stage-10
@@ -21,7 +21,7 @@
- Stage-4
+ Stage-5
@@ -49,7 +49,7 @@
- Stage-2
+ Stage-3
@@ -66,7 +66,7 @@
- Stage-5
+ Stage-6
@@ -379,7 +379,7 @@
- Stage-8
+ Stage-9
@@ -393,7 +393,7 @@
- Stage-7
+ Stage-8
@@ -451,7 +451,7 @@
- Stage-6
+ Stage-7
@@ -505,7 +505,7 @@
- Stage-3
+ Stage-4
@@ -532,6 +532,10 @@
default.src1
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -548,6 +552,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -560,6 +576,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 216
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -594,6 +614,10 @@
default.src1
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -610,6 +634,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -622,6 +658,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 216
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1068,6 +1108,10 @@
default.src1
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -1084,6 +1128,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -1096,6 +1152,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 216
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1130,6 +1190,10 @@
default.src1
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -1146,6 +1210,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -1158,6 +1234,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 216
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Index: ql/src/test/results/compiler/plan/input_testxpath2.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/input_testxpath2.q.xml (revision 1370661)
+++ ql/src/test/results/compiler/plan/input_testxpath2.q.xml (working copy)
@@ -2,7 +2,7 @@
#### A masked pattern was here ####
- Stage-3
+ Stage-4
@@ -29,6 +29,10 @@
default.src_thrift
+ numFiles
+ 1
+
+
columns.types
@@ -45,6 +49,18 @@
org.apache.thrift.protocol.TBinaryProtocol
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
serialization.class
org.apache.hadoop.hive.serde2.thrift.test.Complex
@@ -61,6 +77,10 @@
org.apache.hadoop.mapred.SequenceFileInputFormat
+ totalSize
+ 1606
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -95,6 +115,10 @@
default.src_thrift
+ numFiles
+ 1
+
+
columns.types
@@ -111,6 +135,18 @@
org.apache.thrift.protocol.TBinaryProtocol
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
serialization.class
org.apache.hadoop.hive.serde2.thrift.test.Complex
@@ -127,6 +163,10 @@
org.apache.hadoop.mapred.SequenceFileInputFormat
+ totalSize
+ 1606
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -877,6 +917,10 @@
default.src_thrift
+ numFiles
+ 1
+
+
columns.types
@@ -893,6 +937,18 @@
org.apache.thrift.protocol.TBinaryProtocol
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
serialization.class
org.apache.hadoop.hive.serde2.thrift.test.Complex
@@ -909,6 +965,10 @@
org.apache.hadoop.mapred.SequenceFileInputFormat
+ totalSize
+ 1606
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -943,6 +1003,10 @@
default.src_thrift
+ numFiles
+ 1
+
+
columns.types
@@ -959,6 +1023,18 @@
org.apache.thrift.protocol.TBinaryProtocol
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
serialization.class
org.apache.hadoop.hive.serde2.thrift.test.Complex
@@ -975,6 +1051,10 @@
org.apache.hadoop.mapred.SequenceFileInputFormat
+ totalSize
+ 1606
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
Index: ql/src/test/results/compiler/plan/join7.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/join7.q.xml (revision 1370661)
+++ ql/src/test/results/compiler/plan/join7.q.xml (working copy)
@@ -2,7 +2,7 @@
#### A masked pattern was here ####
- Stage-3
+ Stage-4
@@ -29,6 +29,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -45,6 +49,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -57,6 +73,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -91,6 +111,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -107,6 +131,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -119,6 +155,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -161,6 +201,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -177,6 +221,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -189,6 +245,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -223,6 +283,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -239,6 +303,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -251,6 +327,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -293,6 +373,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -309,6 +393,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -321,6 +417,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -355,6 +455,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -371,6 +475,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -383,6 +499,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -2197,6 +2317,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -2213,6 +2337,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -2225,6 +2361,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -2259,6 +2399,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -2275,6 +2419,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -2287,6 +2443,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -3140,11 +3300,11 @@
-
-
-
-
-
+
+
+
+
+
_col0
Index: ql/src/test/results/compiler/plan/input8.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/input8.q.xml (revision 1370661)
+++ ql/src/test/results/compiler/plan/input8.q.xml (working copy)
@@ -2,7 +2,7 @@
#### A masked pattern was here ####
- Stage-3
+ Stage-4
@@ -29,6 +29,10 @@
default.src1
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -45,6 +49,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -57,6 +73,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 216
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -91,6 +111,10 @@
default.src1
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -107,6 +131,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -119,6 +155,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 216
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -677,6 +717,10 @@
default.src1
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -693,6 +737,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -705,6 +761,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 216
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -739,6 +799,10 @@
default.src1
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -755,6 +819,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -767,6 +843,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 216
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Index: ql/src/test/results/compiler/plan/union.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/union.q.xml (revision 1370661)
+++ ql/src/test/results/compiler/plan/union.q.xml (working copy)
@@ -6,7 +6,7 @@
- Stage-8
+ Stage-9
@@ -17,7 +17,7 @@
- Stage-2
+ Stage-3
@@ -34,7 +34,7 @@
- Stage-4
+ Stage-5
@@ -274,7 +274,7 @@
- Stage-7
+ Stage-8
@@ -288,7 +288,7 @@
- Stage-6
+ Stage-7
@@ -346,7 +346,7 @@
- Stage-5
+ Stage-6
@@ -400,7 +400,7 @@
- Stage-3
+ Stage-4
@@ -427,6 +427,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -443,6 +447,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -455,6 +471,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -489,6 +509,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -505,6 +529,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -517,6 +553,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -559,6 +599,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -575,6 +619,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -587,6 +643,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -621,6 +681,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -637,6 +701,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -649,6 +725,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1619,6 +1699,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -1635,6 +1719,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -1647,6 +1743,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1681,6 +1781,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -1697,6 +1801,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -1709,6 +1825,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Index: ql/src/test/results/compiler/plan/udf4.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/udf4.q.xml (revision 1370661)
+++ ql/src/test/results/compiler/plan/udf4.q.xml (working copy)
@@ -2,7 +2,7 @@
#### A masked pattern was here ####
- Stage-3
+ Stage-4
Index: ql/src/test/results/compiler/plan/udf6.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/udf6.q.xml (revision 1370661)
+++ ql/src/test/results/compiler/plan/udf6.q.xml (working copy)
@@ -2,7 +2,7 @@
#### A masked pattern was here ####
- Stage-3
+ Stage-4
@@ -29,6 +29,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -45,6 +49,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -57,6 +73,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -91,6 +111,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -107,6 +131,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -119,6 +155,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -608,6 +648,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -624,6 +668,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -636,6 +692,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -670,6 +730,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -686,6 +750,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -698,6 +774,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Index: ql/src/test/results/compiler/plan/input_part1.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/input_part1.q.xml (revision 1370661)
+++ ql/src/test/results/compiler/plan/input_part1.q.xml (working copy)
@@ -2,7 +2,7 @@
#### A masked pattern was here ####
- Stage-3
+ Stage-4
@@ -38,6 +38,10 @@
default.srcpart
+ numFiles
+ 4
+
+
columns.types
string:string
@@ -54,6 +58,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 4
+
+
partition_columns
ds/hr
@@ -70,6 +86,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 23248
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -104,6 +124,10 @@
default.srcpart
+ numFiles
+ 4
+
+
columns.types
string:string
@@ -120,6 +144,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 4
+
+
partition_columns
ds/hr
@@ -136,6 +172,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 23248
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -797,6 +837,10 @@
default.srcpart
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -813,6 +857,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 4
+
+
partition_columns
ds/hr
@@ -829,6 +885,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -863,6 +923,10 @@
default.srcpart
+ numFiles
+ 4
+
+
columns.types
string:string
@@ -879,6 +943,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 4
+
+
partition_columns
ds/hr
@@ -895,6 +971,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 23248
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Index: ql/src/test/results/compiler/plan/groupby2.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/groupby2.q.xml (revision 1370661)
+++ ql/src/test/results/compiler/plan/groupby2.q.xml (working copy)
@@ -2,7 +2,7 @@
#### A masked pattern was here ####
- Stage-3
+ Stage-4
@@ -29,6 +29,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -45,6 +49,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -57,6 +73,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -91,6 +111,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -107,6 +131,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -119,6 +155,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1120,6 +1160,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -1136,6 +1180,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -1148,6 +1204,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1182,6 +1242,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -1198,6 +1262,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -1210,6 +1286,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Index: ql/src/test/results/compiler/plan/groupby4.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/groupby4.q.xml (revision 1370661)
+++ ql/src/test/results/compiler/plan/groupby4.q.xml (working copy)
@@ -2,7 +2,7 @@
#### A masked pattern was here ####
- Stage-3
+ Stage-4
@@ -29,6 +29,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -45,6 +49,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -57,6 +73,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -91,6 +111,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -107,6 +131,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -119,6 +155,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -769,6 +809,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -785,6 +829,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -797,6 +853,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -831,6 +891,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -847,6 +911,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -859,6 +935,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Index: ql/src/test/results/compiler/plan/groupby6.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/groupby6.q.xml (revision 1370661)
+++ ql/src/test/results/compiler/plan/groupby6.q.xml (working copy)
@@ -2,7 +2,7 @@
#### A masked pattern was here ####
- Stage-3
+ Stage-4
@@ -29,6 +29,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -45,6 +49,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -57,6 +73,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -91,6 +111,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -107,6 +131,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -119,6 +155,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -769,6 +809,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -785,6 +829,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -797,6 +853,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -831,6 +891,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -847,6 +911,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -859,6 +935,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Index: ql/src/test/results/compiler/plan/case_sensitivity.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/case_sensitivity.q.xml (revision 1370661)
+++ ql/src/test/results/compiler/plan/case_sensitivity.q.xml (working copy)
@@ -6,7 +6,7 @@
- Stage-9
+ Stage-10
@@ -21,7 +21,7 @@
- Stage-4
+ Stage-5
@@ -49,7 +49,7 @@
- Stage-2
+ Stage-3
@@ -66,7 +66,7 @@
- Stage-5
+ Stage-6
@@ -379,7 +379,7 @@
- Stage-8
+ Stage-9
@@ -393,7 +393,7 @@
- Stage-7
+ Stage-8
@@ -451,7 +451,7 @@
- Stage-6
+ Stage-7
@@ -505,7 +505,7 @@
- Stage-3
+ Stage-4
@@ -532,6 +532,10 @@
default.src_thrift
+ numFiles
+ 1
+
+
columns.types
@@ -548,6 +552,18 @@
org.apache.thrift.protocol.TBinaryProtocol
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
serialization.class
org.apache.hadoop.hive.serde2.thrift.test.Complex
@@ -564,6 +580,10 @@
org.apache.hadoop.mapred.SequenceFileInputFormat
+ totalSize
+ 1606
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -598,6 +618,10 @@
default.src_thrift
+ numFiles
+ 1
+
+
columns.types
@@ -614,6 +638,18 @@
org.apache.thrift.protocol.TBinaryProtocol
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
serialization.class
org.apache.hadoop.hive.serde2.thrift.test.Complex
@@ -630,6 +666,10 @@
org.apache.hadoop.mapred.SequenceFileInputFormat
+ totalSize
+ 1606
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -1267,6 +1307,10 @@
default.src_thrift
+ numFiles
+ 1
+
+
columns.types
@@ -1283,6 +1327,18 @@
org.apache.thrift.protocol.TBinaryProtocol
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
serialization.class
org.apache.hadoop.hive.serde2.thrift.test.Complex
@@ -1299,6 +1355,10 @@
org.apache.hadoop.mapred.SequenceFileInputFormat
+ totalSize
+ 1606
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -1333,6 +1393,10 @@
default.src_thrift
+ numFiles
+ 1
+
+
columns.types
@@ -1349,6 +1413,18 @@
org.apache.thrift.protocol.TBinaryProtocol
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
serialization.class
org.apache.hadoop.hive.serde2.thrift.test.Complex
@@ -1365,6 +1441,10 @@
org.apache.hadoop.mapred.SequenceFileInputFormat
+ totalSize
+ 1606
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
Index: ql/src/test/results/compiler/plan/sample1.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/sample1.q.xml (revision 1370661)
+++ ql/src/test/results/compiler/plan/sample1.q.xml (working copy)
@@ -2,7 +2,7 @@
#### A masked pattern was here ####
- Stage-3
+ Stage-4
@@ -38,6 +38,10 @@
default.srcpart
+ numFiles
+ 4
+
+
columns.types
string:string
@@ -54,6 +58,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 4
+
+
partition_columns
ds/hr
@@ -70,6 +86,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 23248
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -104,6 +124,10 @@
default.srcpart
+ numFiles
+ 4
+
+
columns.types
string:string
@@ -120,6 +144,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 4
+
+
partition_columns
ds/hr
@@ -136,6 +172,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 23248
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -918,6 +958,10 @@
default.srcpart
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -934,6 +978,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 4
+
+
partition_columns
ds/hr
@@ -950,6 +1006,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -984,6 +1044,10 @@
default.srcpart
+ numFiles
+ 4
+
+
columns.types
string:string
@@ -1000,6 +1064,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 4
+
+
partition_columns
ds/hr
@@ -1016,6 +1092,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 23248
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Index: ql/src/test/results/compiler/plan/sample3.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/sample3.q.xml (revision 1370661)
+++ ql/src/test/results/compiler/plan/sample3.q.xml (working copy)
@@ -6,7 +6,7 @@
- Stage-9
+ Stage-10
@@ -21,7 +21,7 @@
- Stage-4
+ Stage-5
@@ -49,7 +49,7 @@
- Stage-2
+ Stage-3
@@ -66,7 +66,7 @@
- Stage-5
+ Stage-6
@@ -379,7 +379,7 @@
- Stage-8
+ Stage-9
@@ -393,7 +393,7 @@
- Stage-7
+ Stage-8
@@ -451,7 +451,7 @@
- Stage-6
+ Stage-7
@@ -505,7 +505,7 @@
- Stage-3
+ Stage-4
@@ -532,6 +532,10 @@
default.srcbucket
+ numFiles
+ 2
+
+
columns.types
int:string
@@ -552,6 +556,18 @@
1
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
2
@@ -564,6 +580,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 11603
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -598,6 +618,10 @@
default.srcbucket
+ numFiles
+ 2
+
+
columns.types
int:string
@@ -618,6 +642,18 @@
1
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
2
@@ -630,6 +666,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 11603
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1204,6 +1244,10 @@
default.srcbucket
+ numFiles
+ 2
+
+
columns.types
int:string
@@ -1224,6 +1268,18 @@
1
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
2
@@ -1236,6 +1292,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 11603
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1270,6 +1330,10 @@
default.srcbucket
+ numFiles
+ 2
+
+
columns.types
int:string
@@ -1290,6 +1354,18 @@
1
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
2
@@ -1302,6 +1378,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 11603
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Index: ql/src/test/results/compiler/plan/sample5.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/sample5.q.xml (revision 1370661)
+++ ql/src/test/results/compiler/plan/sample5.q.xml (working copy)
@@ -6,7 +6,7 @@
- Stage-9
+ Stage-10
@@ -21,7 +21,7 @@
- Stage-4
+ Stage-5
@@ -49,7 +49,7 @@
- Stage-2
+ Stage-3
@@ -66,7 +66,7 @@
- Stage-5
+ Stage-6
@@ -379,7 +379,7 @@
- Stage-8
+ Stage-9
@@ -393,7 +393,7 @@
- Stage-7
+ Stage-8
@@ -451,7 +451,7 @@
- Stage-6
+ Stage-7
@@ -505,7 +505,7 @@
- Stage-3
+ Stage-4
@@ -532,6 +532,10 @@
default.srcbucket
+ numFiles
+ 2
+
+
columns.types
int:string
@@ -552,6 +556,18 @@
1
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
2
@@ -564,6 +580,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 11603
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -598,6 +618,10 @@
default.srcbucket
+ numFiles
+ 2
+
+
columns.types
int:string
@@ -618,6 +642,18 @@
1
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
2
@@ -630,6 +666,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 11603
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1191,6 +1231,10 @@
default.srcbucket
+ numFiles
+ 2
+
+
columns.types
int:string
@@ -1211,6 +1255,18 @@
1
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
2
@@ -1223,6 +1279,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 11603
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1257,6 +1317,10 @@
default.srcbucket
+ numFiles
+ 2
+
+
columns.types
int:string
@@ -1277,6 +1341,18 @@
1
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
2
@@ -1289,6 +1365,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 11603
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Index: ql/src/test/results/compiler/plan/sample7.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/sample7.q.xml (revision 1370661)
+++ ql/src/test/results/compiler/plan/sample7.q.xml (working copy)
@@ -6,7 +6,7 @@
- Stage-9
+ Stage-10
@@ -21,7 +21,7 @@
- Stage-4
+ Stage-5
@@ -49,7 +49,7 @@
- Stage-2
+ Stage-3
@@ -66,7 +66,7 @@
- Stage-5
+ Stage-6
@@ -379,7 +379,7 @@
- Stage-8
+ Stage-9
@@ -393,7 +393,7 @@
- Stage-7
+ Stage-8
@@ -451,7 +451,7 @@
- Stage-6
+ Stage-7
@@ -505,7 +505,7 @@
- Stage-3
+ Stage-4
@@ -532,6 +532,10 @@
default.srcbucket
+ numFiles
+ 2
+
+
columns.types
int:string
@@ -552,6 +556,18 @@
1
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
2
@@ -564,6 +580,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 11603
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -598,6 +618,10 @@
default.srcbucket
+ numFiles
+ 2
+
+
columns.types
int:string
@@ -618,6 +642,18 @@
1
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
2
@@ -630,6 +666,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 11603
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1276,6 +1316,10 @@
default.srcbucket
+ numFiles
+ 2
+
+
columns.types
int:string
@@ -1296,6 +1340,18 @@
1
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
2
@@ -1308,6 +1364,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 11603
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1342,6 +1402,10 @@
default.srcbucket
+ numFiles
+ 2
+
+
columns.types
int:string
@@ -1362,6 +1426,18 @@
1
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
2
@@ -1374,6 +1450,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 11603
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Index: ql/src/test/results/compiler/plan/cast1.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/cast1.q.xml (revision 1370661)
+++ ql/src/test/results/compiler/plan/cast1.q.xml (working copy)
@@ -2,7 +2,7 @@
#### A masked pattern was here ####
- Stage-3
+ Stage-4
@@ -29,6 +29,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -45,6 +49,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -57,6 +73,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -91,6 +111,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -107,6 +131,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -119,6 +155,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1102,6 +1142,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -1118,6 +1162,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -1130,6 +1186,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1164,6 +1224,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -1180,6 +1244,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -1192,6 +1268,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Index: ql/src/test/results/compiler/plan/input1.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/input1.q.xml (revision 1370661)
+++ ql/src/test/results/compiler/plan/input1.q.xml (working copy)
@@ -6,7 +6,7 @@
- Stage-9
+ Stage-10
@@ -21,7 +21,7 @@
- Stage-4
+ Stage-5
@@ -49,7 +49,7 @@
- Stage-2
+ Stage-3
@@ -66,7 +66,7 @@
- Stage-5
+ Stage-6
@@ -379,7 +379,7 @@
- Stage-8
+ Stage-9
@@ -393,7 +393,7 @@
- Stage-7
+ Stage-8
@@ -451,7 +451,7 @@
- Stage-6
+ Stage-7
@@ -505,7 +505,7 @@
- Stage-3
+ Stage-4
@@ -532,6 +532,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -548,6 +552,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -560,6 +576,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -594,6 +614,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -610,6 +634,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -622,6 +658,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1082,6 +1122,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -1098,6 +1142,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -1110,6 +1166,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1144,6 +1204,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -1160,6 +1224,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -1172,6 +1248,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Index: ql/src/test/results/compiler/plan/join2.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/join2.q.xml (revision 1370661)
+++ ql/src/test/results/compiler/plan/join2.q.xml (working copy)
@@ -14,7 +14,7 @@
- Stage-5
+ Stage-6
@@ -42,7 +42,7 @@
- Stage-2
+ Stage-3
@@ -140,7 +140,7 @@
- Stage-4
+ Stage-5
@@ -174,6 +174,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -190,6 +194,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -202,6 +218,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -236,6 +256,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -252,6 +276,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -264,6 +300,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1045,6 +1085,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -1061,6 +1105,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -1073,6 +1129,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1107,6 +1167,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -1123,6 +1187,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -1135,6 +1211,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1455,11 +1535,11 @@
true
-
-
-
-
-
+
+
+
+
+
_col4
@@ -1638,7 +1718,7 @@
- Stage-3
+ Stage-4
@@ -1665,6 +1745,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -1681,6 +1765,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -1693,6 +1789,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1727,6 +1827,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -1743,6 +1847,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -1755,6 +1871,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1797,6 +1917,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -1813,6 +1937,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -1825,6 +1961,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1859,6 +1999,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -1875,6 +2019,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -1887,6 +2043,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -2553,6 +2713,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -2569,6 +2733,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -2581,6 +2757,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -2615,6 +2795,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -2631,6 +2815,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -2643,6 +2839,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -2799,11 +2999,11 @@
true
-
-
-
-
-
+
+
+
+
+
_col0
Index: ql/src/test/results/compiler/plan/input3.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/input3.q.xml (revision 1370661)
+++ ql/src/test/results/compiler/plan/input3.q.xml (working copy)
@@ -6,7 +6,7 @@
- Stage-12
+ Stage-13
@@ -21,7 +21,7 @@
- Stage-7
+ Stage-8
@@ -49,7 +49,7 @@
- Stage-2
+ Stage-3
@@ -66,7 +66,7 @@
- Stage-8
+ Stage-9
@@ -379,7 +379,7 @@
- Stage-11
+ Stage-12
@@ -393,7 +393,7 @@
- Stage-10
+ Stage-11
@@ -451,7 +451,7 @@
- Stage-9
+ Stage-10
@@ -505,7 +505,7 @@
- Stage-18
+ Stage-19
@@ -520,7 +520,7 @@
- Stage-13
+ Stage-14
@@ -548,7 +548,7 @@
- Stage-3
+ Stage-4
@@ -565,7 +565,7 @@
- Stage-14
+ Stage-15
@@ -874,7 +874,7 @@
- Stage-17
+ Stage-18
@@ -888,7 +888,7 @@
- Stage-16
+ Stage-17
@@ -946,7 +946,7 @@
- Stage-15
+ Stage-16
@@ -1000,7 +1000,7 @@
- Stage-24
+ Stage-25
@@ -1015,7 +1015,7 @@
- Stage-19
+ Stage-20
@@ -1043,7 +1043,7 @@
- Stage-4
+ Stage-5
@@ -1060,7 +1060,7 @@
- Stage-20
+ Stage-21
@@ -1373,7 +1373,7 @@
- Stage-23
+ Stage-24
@@ -1387,7 +1387,7 @@
- Stage-22
+ Stage-23
@@ -1454,7 +1454,7 @@
- Stage-21
+ Stage-22
@@ -1508,7 +1508,7 @@
- Stage-29
+ Stage-30
@@ -1519,7 +1519,7 @@
- Stage-5
+ Stage-6
@@ -1536,7 +1536,7 @@
- Stage-25
+ Stage-26
@@ -1759,7 +1759,7 @@
- Stage-28
+ Stage-29
@@ -1773,7 +1773,7 @@
- Stage-27
+ Stage-28
@@ -1831,7 +1831,7 @@
- Stage-26
+ Stage-27
@@ -1885,7 +1885,7 @@
- Stage-6
+ Stage-7
@@ -1912,6 +1912,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -1928,6 +1932,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -1940,6 +1956,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1974,6 +1994,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -1990,6 +2014,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -2002,6 +2038,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -3352,6 +3392,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -3368,6 +3412,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -3380,6 +3436,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -3414,6 +3474,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -3430,6 +3494,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -3442,6 +3518,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Index: ql/src/test/results/compiler/plan/join4.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/join4.q.xml (revision 1370661)
+++ ql/src/test/results/compiler/plan/join4.q.xml (working copy)
@@ -2,7 +2,7 @@
#### A masked pattern was here ####
- Stage-3
+ Stage-4
@@ -29,6 +29,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -45,6 +49,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -57,6 +73,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -91,6 +111,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -107,6 +131,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -119,6 +155,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -161,6 +201,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -177,6 +221,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -189,6 +245,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -223,6 +283,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -239,6 +303,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -251,6 +327,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1489,6 +1569,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -1505,6 +1589,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -1517,6 +1613,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1551,6 +1651,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -1567,6 +1671,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -1579,6 +1695,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -2227,11 +2347,11 @@
-
-
-
-
-
+
+
+
+
+
_col0
Index: ql/src/test/results/compiler/plan/input5.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/input5.q.xml (revision 1370661)
+++ ql/src/test/results/compiler/plan/input5.q.xml (working copy)
@@ -10,7 +10,7 @@
- Stage-4
+ Stage-5
@@ -38,7 +38,7 @@
- Stage-2
+ Stage-3
@@ -136,7 +136,7 @@
- Stage-3
+ Stage-4
@@ -163,6 +163,10 @@
default.src_thrift
+ numFiles
+ 1
+
+
columns.types
@@ -179,6 +183,18 @@
org.apache.thrift.protocol.TBinaryProtocol
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
serialization.class
org.apache.hadoop.hive.serde2.thrift.test.Complex
@@ -195,6 +211,10 @@
org.apache.hadoop.mapred.SequenceFileInputFormat
+ totalSize
+ 1606
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -229,6 +249,10 @@
default.src_thrift
+ numFiles
+ 1
+
+
columns.types
@@ -245,6 +269,18 @@
org.apache.thrift.protocol.TBinaryProtocol
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
serialization.class
org.apache.hadoop.hive.serde2.thrift.test.Complex
@@ -261,6 +297,10 @@
org.apache.hadoop.mapred.SequenceFileInputFormat
+ totalSize
+ 1606
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -1050,6 +1090,10 @@
default.src_thrift
+ numFiles
+ 1
+
+
columns.types
@@ -1066,6 +1110,18 @@
org.apache.thrift.protocol.TBinaryProtocol
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
serialization.class
org.apache.hadoop.hive.serde2.thrift.test.Complex
@@ -1082,6 +1138,10 @@
org.apache.hadoop.mapred.SequenceFileInputFormat
+ totalSize
+ 1606
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -1116,6 +1176,10 @@
default.src_thrift
+ numFiles
+ 1
+
+
columns.types
@@ -1132,6 +1196,18 @@
org.apache.thrift.protocol.TBinaryProtocol
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
serialization.class
org.apache.hadoop.hive.serde2.thrift.test.Complex
@@ -1148,6 +1224,10 @@
org.apache.hadoop.mapred.SequenceFileInputFormat
+ totalSize
+ 1606
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
Index: ql/src/test/results/compiler/plan/join6.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/join6.q.xml (revision 1370661)
+++ ql/src/test/results/compiler/plan/join6.q.xml (working copy)
@@ -2,7 +2,7 @@
#### A masked pattern was here ####
- Stage-3
+ Stage-4
@@ -29,6 +29,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -45,6 +49,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -57,6 +73,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -91,6 +111,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -107,6 +131,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -119,6 +155,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -161,6 +201,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -177,6 +221,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -189,6 +245,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -223,6 +283,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -239,6 +303,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -251,6 +327,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1489,6 +1569,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -1505,6 +1589,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -1517,6 +1613,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1551,6 +1651,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -1567,6 +1671,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -1579,6 +1695,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -2227,11 +2347,11 @@
-
-
-
-
-
+
+
+
+
+
_col0
Index: ql/src/test/results/compiler/plan/input7.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/input7.q.xml (revision 1370661)
+++ ql/src/test/results/compiler/plan/input7.q.xml (working copy)
@@ -6,7 +6,7 @@
- Stage-9
+ Stage-10
@@ -21,7 +21,7 @@
- Stage-4
+ Stage-5
@@ -49,7 +49,7 @@
- Stage-2
+ Stage-3
@@ -66,7 +66,7 @@
- Stage-5
+ Stage-6
@@ -379,7 +379,7 @@
- Stage-8
+ Stage-9
@@ -393,7 +393,7 @@
- Stage-7
+ Stage-8
@@ -451,7 +451,7 @@
- Stage-6
+ Stage-7
@@ -505,7 +505,7 @@
- Stage-3
+ Stage-4
@@ -532,6 +532,10 @@
default.src1
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -548,6 +552,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -560,6 +576,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 216
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -594,6 +614,10 @@
default.src1
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -610,6 +634,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -622,6 +658,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 216
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -984,6 +1024,10 @@
default.src1
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -1000,6 +1044,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -1012,6 +1068,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 216
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1046,6 +1106,10 @@
default.src1
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -1062,6 +1126,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -1074,6 +1150,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 216
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Index: ql/src/test/results/compiler/plan/input_testsequencefile.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/input_testsequencefile.q.xml (revision 1370661)
+++ ql/src/test/results/compiler/plan/input_testsequencefile.q.xml (working copy)
@@ -6,7 +6,7 @@
- Stage-9
+ Stage-10
@@ -21,7 +21,7 @@
- Stage-4
+ Stage-5
@@ -49,7 +49,7 @@
- Stage-2
+ Stage-3
@@ -66,7 +66,7 @@
- Stage-5
+ Stage-6
@@ -379,7 +379,7 @@
- Stage-8
+ Stage-9
@@ -393,7 +393,7 @@
- Stage-7
+ Stage-8
@@ -451,7 +451,7 @@
- Stage-6
+ Stage-7
@@ -505,7 +505,7 @@
- Stage-3
+ Stage-4
@@ -532,6 +532,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -548,6 +552,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -560,6 +576,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -594,6 +614,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -610,6 +634,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -622,6 +658,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -992,6 +1032,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -1008,6 +1052,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -1020,6 +1076,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1054,6 +1114,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -1070,6 +1134,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -1082,6 +1158,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Index: ql/src/test/results/compiler/plan/join8.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/join8.q.xml (revision 1370661)
+++ ql/src/test/results/compiler/plan/join8.q.xml (working copy)
@@ -2,7 +2,7 @@
#### A masked pattern was here ####
- Stage-3
+ Stage-4
@@ -29,6 +29,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -45,6 +49,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -57,6 +73,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -91,6 +111,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -107,6 +131,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -119,6 +155,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -161,6 +201,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -177,6 +221,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -189,6 +245,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -223,6 +283,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -239,6 +303,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -251,6 +327,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1571,6 +1651,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -1587,6 +1671,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -1599,6 +1695,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1633,6 +1733,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -1649,6 +1753,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -1661,6 +1777,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -2424,11 +2544,11 @@
-
-
-
-
-
+
+
+
+
+
_col0
Index: ql/src/test/results/compiler/plan/input9.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/input9.q.xml (revision 1370661)
+++ ql/src/test/results/compiler/plan/input9.q.xml (working copy)
@@ -6,7 +6,7 @@
- Stage-9
+ Stage-10
@@ -21,7 +21,7 @@
- Stage-4
+ Stage-5
@@ -49,7 +49,7 @@
- Stage-2
+ Stage-3
@@ -66,7 +66,7 @@
- Stage-5
+ Stage-6
@@ -379,7 +379,7 @@
- Stage-8
+ Stage-9
@@ -393,7 +393,7 @@
- Stage-7
+ Stage-8
@@ -451,7 +451,7 @@
- Stage-6
+ Stage-7
@@ -505,7 +505,7 @@
- Stage-3
+ Stage-4
@@ -532,6 +532,10 @@
default.src1
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -548,6 +552,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -560,6 +576,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 216
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -594,6 +614,10 @@
default.src1
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -610,6 +634,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -622,6 +658,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 216
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1061,6 +1101,10 @@
default.src1
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -1077,6 +1121,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -1089,6 +1145,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 216
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1123,6 +1183,10 @@
default.src1
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -1139,6 +1203,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -1151,6 +1227,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 216
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Index: ql/src/test/results/compiler/plan/udf1.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/udf1.q.xml (revision 1370661)
+++ ql/src/test/results/compiler/plan/udf1.q.xml (working copy)
@@ -2,7 +2,7 @@
#### A masked pattern was here ####
- Stage-3
+ Stage-4
@@ -29,6 +29,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -45,6 +49,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -57,6 +73,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -91,6 +111,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -107,6 +131,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -119,6 +155,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1928,6 +1968,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -1944,6 +1988,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -1956,6 +2012,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1990,6 +2050,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -2006,6 +2070,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -2018,6 +2094,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Index: ql/src/test/results/compiler/plan/input_testxpath.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/input_testxpath.q.xml (revision 1370661)
+++ ql/src/test/results/compiler/plan/input_testxpath.q.xml (working copy)
@@ -2,7 +2,7 @@
#### A masked pattern was here ####
- Stage-3
+ Stage-4
@@ -29,6 +29,10 @@
default.src_thrift
+ numFiles
+ 1
+
+
columns.types
@@ -45,6 +49,18 @@
org.apache.thrift.protocol.TBinaryProtocol
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
serialization.class
org.apache.hadoop.hive.serde2.thrift.test.Complex
@@ -61,6 +77,10 @@
org.apache.hadoop.mapred.SequenceFileInputFormat
+ totalSize
+ 1606
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -95,6 +115,10 @@
default.src_thrift
+ numFiles
+ 1
+
+
columns.types
@@ -111,6 +135,18 @@
org.apache.thrift.protocol.TBinaryProtocol
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
serialization.class
org.apache.hadoop.hive.serde2.thrift.test.Complex
@@ -127,6 +163,10 @@
org.apache.hadoop.mapred.SequenceFileInputFormat
+ totalSize
+ 1606
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -789,6 +829,10 @@
default.src_thrift
+ numFiles
+ 1
+
+
columns.types
@@ -805,6 +849,18 @@
org.apache.thrift.protocol.TBinaryProtocol
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
serialization.class
org.apache.hadoop.hive.serde2.thrift.test.Complex
@@ -821,6 +877,10 @@
org.apache.hadoop.mapred.SequenceFileInputFormat
+ totalSize
+ 1606
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -855,6 +915,10 @@
default.src_thrift
+ numFiles
+ 1
+
+
columns.types
@@ -871,6 +935,18 @@
org.apache.thrift.protocol.TBinaryProtocol
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
serialization.class
org.apache.hadoop.hive.serde2.thrift.test.Complex
@@ -887,6 +963,10 @@
org.apache.hadoop.mapred.SequenceFileInputFormat
+ totalSize
+ 1606
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
Index: ql/src/test/results/compiler/plan/groupby1.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/groupby1.q.xml (revision 1370661)
+++ ql/src/test/results/compiler/plan/groupby1.q.xml (working copy)
@@ -10,7 +10,7 @@
- Stage-4
+ Stage-5
@@ -38,7 +38,7 @@
- Stage-2
+ Stage-3
@@ -136,7 +136,7 @@
- Stage-3
+ Stage-4
@@ -163,6 +163,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -179,6 +183,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -191,6 +207,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -225,6 +245,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -241,6 +265,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -253,6 +289,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -999,6 +1039,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -1015,6 +1059,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -1027,6 +1083,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1061,6 +1121,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -1077,6 +1141,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -1089,6 +1165,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Index: ql/src/test/results/compiler/plan/udf_case.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/udf_case.q.xml (revision 1370661)
+++ ql/src/test/results/compiler/plan/udf_case.q.xml (working copy)
@@ -2,7 +2,7 @@
#### A masked pattern was here ####
- Stage-3
+ Stage-4
@@ -29,6 +29,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -45,6 +49,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -57,6 +73,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -91,6 +111,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -107,6 +131,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -119,6 +155,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -702,6 +742,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -718,6 +762,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -730,6 +786,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -764,6 +824,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -780,6 +844,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -792,6 +868,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Index: ql/src/test/results/compiler/plan/subq.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/subq.q.xml (revision 1370661)
+++ ql/src/test/results/compiler/plan/subq.q.xml (working copy)
@@ -6,7 +6,7 @@
- Stage-8
+ Stage-9
@@ -17,7 +17,7 @@
- Stage-2
+ Stage-3
@@ -34,7 +34,7 @@
- Stage-4
+ Stage-5
@@ -274,7 +274,7 @@
- Stage-7
+ Stage-8
@@ -288,7 +288,7 @@
- Stage-6
+ Stage-7
@@ -346,7 +346,7 @@
- Stage-5
+ Stage-6
@@ -400,7 +400,7 @@
- Stage-3
+ Stage-4
@@ -427,6 +427,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -443,6 +447,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -455,6 +471,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -489,6 +509,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -505,6 +529,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -517,6 +553,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1107,6 +1147,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -1123,6 +1167,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -1135,6 +1191,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1169,6 +1229,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -1185,6 +1249,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -1197,6 +1273,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Index: ql/src/test/results/compiler/plan/groupby3.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/groupby3.q.xml (revision 1370661)
+++ ql/src/test/results/compiler/plan/groupby3.q.xml (working copy)
@@ -2,7 +2,7 @@
#### A masked pattern was here ####
- Stage-3
+ Stage-4
@@ -29,6 +29,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -45,6 +49,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -57,6 +73,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -91,6 +111,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -107,6 +131,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -119,6 +155,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1322,6 +1362,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -1338,6 +1382,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -1350,6 +1406,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1384,6 +1444,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -1400,6 +1464,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -1412,6 +1488,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Index: ql/src/test/results/compiler/plan/groupby5.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/groupby5.q.xml (revision 1370661)
+++ ql/src/test/results/compiler/plan/groupby5.q.xml (working copy)
@@ -2,7 +2,7 @@
#### A masked pattern was here ####
- Stage-3
+ Stage-4
@@ -29,6 +29,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -45,6 +49,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -57,6 +73,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -91,6 +111,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -107,6 +131,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -119,6 +155,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -862,6 +902,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -878,6 +922,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -890,6 +946,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -924,6 +984,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -940,6 +1004,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -952,6 +1028,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Index: ql/src/test/results/compiler/plan/udf_when.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/udf_when.q.xml (revision 1370661)
+++ ql/src/test/results/compiler/plan/udf_when.q.xml (working copy)
@@ -2,7 +2,7 @@
#### A masked pattern was here ####
- Stage-3
+ Stage-4
@@ -29,6 +29,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -45,6 +49,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -57,6 +73,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -91,6 +111,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -107,6 +131,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -119,6 +155,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -782,6 +822,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -798,6 +842,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -810,6 +866,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -844,6 +904,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -860,6 +924,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -872,6 +948,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Index: ql/src/test/results/compiler/plan/input20.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/input20.q.xml (revision 1370661)
+++ ql/src/test/results/compiler/plan/input20.q.xml (working copy)
@@ -2,7 +2,7 @@
#### A masked pattern was here ####
- Stage-3
+ Stage-4
@@ -29,6 +29,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -45,6 +49,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -57,6 +73,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -91,6 +111,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -107,6 +131,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -119,6 +155,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -886,6 +926,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -902,6 +946,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -914,6 +970,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -948,6 +1008,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -964,6 +1028,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -976,6 +1052,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Index: ql/src/test/results/compiler/plan/sample2.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/sample2.q.xml (revision 1370661)
+++ ql/src/test/results/compiler/plan/sample2.q.xml (working copy)
@@ -6,7 +6,7 @@
- Stage-9
+ Stage-10
@@ -21,7 +21,7 @@
- Stage-4
+ Stage-5
@@ -49,7 +49,7 @@
- Stage-2
+ Stage-3
@@ -66,7 +66,7 @@
- Stage-5
+ Stage-6
@@ -379,7 +379,7 @@
- Stage-8
+ Stage-9
@@ -393,7 +393,7 @@
- Stage-7
+ Stage-8
@@ -451,7 +451,7 @@
- Stage-6
+ Stage-7
@@ -505,7 +505,7 @@
- Stage-3
+ Stage-4
@@ -532,6 +532,10 @@
default.srcbucket
+ numFiles
+ 2
+
+
columns.types
int:string
@@ -552,6 +556,18 @@
1
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
2
@@ -564,6 +580,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 11603
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -598,6 +618,10 @@
default.srcbucket
+ numFiles
+ 2
+
+
columns.types
int:string
@@ -618,6 +642,18 @@
1
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
2
@@ -630,6 +666,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 11603
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1194,6 +1234,10 @@
default.srcbucket
+ numFiles
+ 2
+
+
columns.types
int:string
@@ -1214,6 +1258,18 @@
1
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
2
@@ -1226,6 +1282,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 11603
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1260,6 +1320,10 @@
default.srcbucket
+ numFiles
+ 2
+
+
columns.types
int:string
@@ -1280,6 +1344,18 @@
1
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
2
@@ -1292,6 +1368,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 11603
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Index: ql/src/test/results/compiler/plan/sample4.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/sample4.q.xml (revision 1370661)
+++ ql/src/test/results/compiler/plan/sample4.q.xml (working copy)
@@ -6,7 +6,7 @@
- Stage-9
+ Stage-10
@@ -21,7 +21,7 @@
- Stage-4
+ Stage-5
@@ -49,7 +49,7 @@
- Stage-2
+ Stage-3
@@ -66,7 +66,7 @@
- Stage-5
+ Stage-6
@@ -379,7 +379,7 @@
- Stage-8
+ Stage-9
@@ -393,7 +393,7 @@
- Stage-7
+ Stage-8
@@ -451,7 +451,7 @@
- Stage-6
+ Stage-7
@@ -505,7 +505,7 @@
- Stage-3
+ Stage-4
@@ -532,6 +532,10 @@
default.srcbucket
+ numFiles
+ 2
+
+
columns.types
int:string
@@ -552,6 +556,18 @@
1
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
2
@@ -564,6 +580,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 11603
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -598,6 +618,10 @@
default.srcbucket
+ numFiles
+ 2
+
+
columns.types
int:string
@@ -618,6 +642,18 @@
1
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
2
@@ -630,6 +666,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 11603
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1194,6 +1234,10 @@
default.srcbucket
+ numFiles
+ 2
+
+
columns.types
int:string
@@ -1214,6 +1258,18 @@
1
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
2
@@ -1226,6 +1282,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 11603
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1260,6 +1320,10 @@
default.srcbucket
+ numFiles
+ 2
+
+
columns.types
int:string
@@ -1280,6 +1344,18 @@
1
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
2
@@ -1292,6 +1368,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 11603
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Index: ql/src/test/results/compiler/plan/sample6.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/sample6.q.xml (revision 1370661)
+++ ql/src/test/results/compiler/plan/sample6.q.xml (working copy)
@@ -6,7 +6,7 @@
- Stage-9
+ Stage-10
@@ -21,7 +21,7 @@
- Stage-4
+ Stage-5
@@ -49,7 +49,7 @@
- Stage-2
+ Stage-3
@@ -66,7 +66,7 @@
- Stage-5
+ Stage-6
@@ -379,7 +379,7 @@
- Stage-8
+ Stage-9
@@ -393,7 +393,7 @@
- Stage-7
+ Stage-8
@@ -451,7 +451,7 @@
- Stage-6
+ Stage-7
@@ -505,7 +505,7 @@
- Stage-3
+ Stage-4
@@ -532,6 +532,10 @@
default.srcbucket
+ numFiles
+ 2
+
+
columns.types
int:string
@@ -552,6 +556,18 @@
1
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
2
@@ -564,6 +580,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 11603
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -598,6 +618,10 @@
default.srcbucket
+ numFiles
+ 2
+
+
columns.types
int:string
@@ -618,6 +642,18 @@
1
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
2
@@ -630,6 +666,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 11603
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1194,6 +1234,10 @@
default.srcbucket
+ numFiles
+ 2
+
+
columns.types
int:string
@@ -1214,6 +1258,18 @@
1
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
2
@@ -1226,6 +1282,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 11603
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1260,6 +1320,10 @@
default.srcbucket
+ numFiles
+ 2
+
+
columns.types
int:string
@@ -1280,6 +1344,18 @@
1
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
2
@@ -1292,6 +1368,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 11603
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Index: ql/src/test/results/compiler/plan/join1.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/join1.q.xml (revision 1370661)
+++ ql/src/test/results/compiler/plan/join1.q.xml (working copy)
@@ -10,7 +10,7 @@
- Stage-4
+ Stage-5
@@ -38,7 +38,7 @@
- Stage-2
+ Stage-3
@@ -136,7 +136,7 @@
- Stage-3
+ Stage-4
@@ -163,6 +163,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -179,6 +183,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -191,6 +207,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -225,6 +245,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -241,6 +265,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -253,6 +289,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -295,6 +335,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -311,6 +355,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -323,6 +379,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -357,6 +417,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -373,6 +437,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -385,6 +461,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1079,6 +1159,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -1095,6 +1179,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -1107,6 +1203,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1141,6 +1241,10 @@
default.src
+ numFiles
+ 1
+
+
columns.types
string:string
@@ -1157,6 +1261,18 @@
key,value
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ numPartitions
+ 0
+
+
bucket_count
-1
@@ -1169,6 +1285,10 @@
org.apache.hadoop.mapred.TextInputFormat
+ totalSize
+ 5812
+
+
file.outputformat
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1489,11 +1609,11 @@
true
-
-
-
-
-
+
+
+
+
+
_col0
Index: ql/src/test/queries/clientpositive/stats9.q
===================================================================
--- ql/src/test/queries/clientpositive/stats9.q (revision 1370661)
+++ ql/src/test/queries/clientpositive/stats9.q (working copy)
@@ -6,4 +6,4 @@
explain analyze table analyze_srcbucket compute statistics;
analyze table analyze_srcbucket compute statistics;
-describe extended analyze_srcbucket;
+describe formatted analyze_srcbucket;
Index: ql/src/test/queries/clientpositive/stats10.q
===================================================================
--- ql/src/test/queries/clientpositive/stats10.q (revision 1370661)
+++ ql/src/test/queries/clientpositive/stats10.q (working copy)
@@ -23,6 +23,6 @@
explain analyze table bucket3_1 partition (ds) compute statistics;
analyze table bucket3_1 partition (ds) compute statistics;
-describe extended bucket3_1 partition (ds='1');
-describe extended bucket3_1 partition (ds='2');
-describe extended bucket3_1;
+describe formatted bucket3_1 partition (ds='1');
+describe formatted bucket3_1 partition (ds='2');
+describe formatted bucket3_1;
Index: ql/src/test/queries/clientpositive/stats2.q
===================================================================
--- ql/src/test/queries/clientpositive/stats2.q (revision 1370661)
+++ ql/src/test/queries/clientpositive/stats2.q (working copy)
@@ -12,10 +12,10 @@
insert overwrite table analyze_t1 partition (ds, hr) select * from srcpart where ds is not null;
-desc extended analyze_t1;
+desc formatted analyze_t1;
explain analyze table analyze_t1 partition (ds, hr) compute statistics;
analyze table analyze_t1 partition (ds, hr) compute statistics;
-describe extended analyze_t1;
+describe formatted analyze_t1;
Index: ql/src/test/queries/clientpositive/stats12.q
===================================================================
--- ql/src/test/queries/clientpositive/stats12.q (revision 1370661)
+++ ql/src/test/queries/clientpositive/stats12.q (working copy)
@@ -11,9 +11,9 @@
analyze table analyze_srcpart PARTITION(ds='2008-04-08',hr) compute statistics;
-desc extended analyze_srcpart;
-desc extended analyze_srcpart partition (ds='2008-04-08', hr=11);
-desc extended analyze_srcpart partition (ds='2008-04-08', hr=12);
-desc extended analyze_srcpart partition (ds='2008-04-09', hr=11);
-desc extended analyze_srcpart partition (ds='2008-04-09', hr=12);
+desc formatted analyze_srcpart;
+desc formatted analyze_srcpart partition (ds='2008-04-08', hr=11);
+desc formatted analyze_srcpart partition (ds='2008-04-08', hr=12);
+desc formatted analyze_srcpart partition (ds='2008-04-09', hr=11);
+desc formatted analyze_srcpart partition (ds='2008-04-09', hr=12);
Index: ql/src/test/queries/clientpositive/stats4.q
===================================================================
--- ql/src/test/queries/clientpositive/stats4.q (revision 1370661)
+++ ql/src/test/queries/clientpositive/stats4.q (working copy)
@@ -28,13 +28,13 @@
select * from nzhang_part1 where ds is not null and hr is not null;
select * from nzhang_part2 where ds is not null and hr is not null;
-describe extended nzhang_part1 partition(ds='2008-04-08',hr=11);
-describe extended nzhang_part1 partition(ds='2008-04-08',hr=12);
-describe extended nzhang_part2 partition(ds='2008-12-31',hr=11);
-describe extended nzhang_part2 partition(ds='2008-12-31',hr=12);
+describe formatted nzhang_part1 partition(ds='2008-04-08',hr=11);
+describe formatted nzhang_part1 partition(ds='2008-04-08',hr=12);
+describe formatted nzhang_part2 partition(ds='2008-12-31',hr=11);
+describe formatted nzhang_part2 partition(ds='2008-12-31',hr=12);
-describe extended nzhang_part1;
-describe extended nzhang_part2;
+describe formatted nzhang_part1;
+describe formatted nzhang_part2;
drop table nzhang_part1;
drop table nzhang_part2;
Index: ql/src/test/queries/clientpositive/stats6.q
===================================================================
--- ql/src/test/queries/clientpositive/stats6.q (revision 1370661)
+++ ql/src/test/queries/clientpositive/stats6.q (working copy)
@@ -9,9 +9,9 @@
analyze table analyze_srcpart PARTITION(ds='2008-04-08',hr=11) compute statistics;
analyze table analyze_srcpart PARTITION(ds='2008-04-08',hr=12) compute statistics;
-describe extended analyze_srcpart PARTITION(ds='2008-04-08',hr=11);
-describe extended analyze_srcpart PARTITION(ds='2008-04-08',hr=12);
-describe extended analyze_srcpart PARTITION(ds='2008-04-09',hr=11);
-describe extended analyze_srcpart PARTITION(ds='2008-04-09',hr=12);
+describe formatted analyze_srcpart PARTITION(ds='2008-04-08',hr=11);
+describe formatted analyze_srcpart PARTITION(ds='2008-04-08',hr=12);
+describe formatted analyze_srcpart PARTITION(ds='2008-04-09',hr=11);
+describe formatted analyze_srcpart PARTITION(ds='2008-04-09',hr=12);
-describe extended analyze_srcpart;
+describe formatted analyze_srcpart;
Index: ql/src/test/queries/clientpositive/stats8.q
===================================================================
--- ql/src/test/queries/clientpositive/stats8.q (revision 1370661)
+++ ql/src/test/queries/clientpositive/stats8.q (working copy)
@@ -8,26 +8,26 @@
explain analyze table analyze_srcpart PARTITION(ds='2008-04-08',hr=11) compute statistics;
analyze table analyze_srcpart PARTITION(ds='2008-04-08',hr=11) compute statistics;
-describe extended analyze_srcpart PARTITION(ds='2008-04-08',hr=11);
-describe extended analyze_srcpart;
+describe formatted analyze_srcpart PARTITION(ds='2008-04-08',hr=11);
+describe formatted analyze_srcpart;
explain analyze table analyze_srcpart PARTITION(ds='2008-04-08',hr=12) compute statistics;
analyze table analyze_srcpart PARTITION(ds='2008-04-08',hr=12) compute statistics;
-describe extended analyze_srcpart PARTITION(ds='2008-04-08',hr=12);
+describe formatted analyze_srcpart PARTITION(ds='2008-04-08',hr=12);
explain analyze table analyze_srcpart PARTITION(ds='2008-04-09',hr=11) compute statistics;
analyze table analyze_srcpart PARTITION(ds='2008-04-09',hr=11) compute statistics;
-describe extended analyze_srcpart PARTITION(ds='2008-04-09',hr=11);
+describe formatted analyze_srcpart PARTITION(ds='2008-04-09',hr=11);
explain analyze table analyze_srcpart PARTITION(ds='2008-04-09',hr=12) compute statistics;
analyze table analyze_srcpart PARTITION(ds='2008-04-09',hr=12) compute statistics;
-describe extended analyze_srcpart PARTITION(ds='2008-04-09',hr=12);
+describe formatted analyze_srcpart PARTITION(ds='2008-04-09',hr=12);
explain analyze table analyze_srcpart PARTITION(ds, hr) compute statistics;
analyze table analyze_srcpart PARTITION(ds, hr) compute statistics;
-describe extended analyze_srcpart PARTITION(ds='2008-04-08',hr=11);
-describe extended analyze_srcpart PARTITION(ds='2008-04-08',hr=12);
-describe extended analyze_srcpart PARTITION(ds='2008-04-09',hr=11);
-describe extended analyze_srcpart PARTITION(ds='2008-04-09',hr=12);
-describe extended analyze_srcpart;
+describe formatted analyze_srcpart PARTITION(ds='2008-04-08',hr=11);
+describe formatted analyze_srcpart PARTITION(ds='2008-04-08',hr=12);
+describe formatted analyze_srcpart PARTITION(ds='2008-04-09',hr=11);
+describe formatted analyze_srcpart PARTITION(ds='2008-04-09',hr=12);
+describe formatted analyze_srcpart;
Index: ql/src/test/queries/clientpositive/stats18.q
===================================================================
--- ql/src/test/queries/clientpositive/stats18.q (revision 0)
+++ ql/src/test/queries/clientpositive/stats18.q (working copy)
@@ -0,0 +1,21 @@
+set datanucleus.cache.collections=false;
+set hive.stats.autogather=true;
+set hive.merge.mapfiles=false;
+set hive.merge.mapredfiles=false;
+set hive.map.aggr=true;
+
+create table stats_part like srcpart;
+
+insert overwrite table stats_part partition (ds='2010-04-08', hr = '13') select key, value from src;
+
+-- Load a file into an existing partition
+-- Some stats (numFiles, totalSize) should be updated correctly
+-- Some other stats (numRows, rawDataSize) should be cleared
+desc formatted stats_part partition (ds='2010-04-08', hr='13');
+
+load data local inpath '../data/files/srcbucket20.txt' INTO TABLE stats_part partition (ds='2010-04-08', hr='13');
+
+desc formatted stats_part partition (ds='2010-04-08', hr='13');
+
+drop table stats_src;
+drop table stats_part;
Index: ql/src/test/queries/clientpositive/stats1.q
===================================================================
--- ql/src/test/queries/clientpositive/stats1.q (revision 1370661)
+++ ql/src/test/queries/clientpositive/stats1.q (working copy)
@@ -21,4 +21,10 @@
SELECT * FROM tmptable x SORT BY x.key, x.value;
-DESCRIBE EXTENDED tmptable;
+DESCRIBE FORMATTED tmptable;
+
+-- Load a file into an existing table
+-- Some stats (numFiles, totalSize) should be updated correctly
+-- Some other stats (numRows, rawDataSize) should be cleared
+load data local inpath '../data/files/srcbucket20.txt' INTO TABLE tmptable;
+DESCRIBE FORMATTED tmptable;
\ No newline at end of file
Index: ql/src/test/queries/clientpositive/stats11.q
===================================================================
--- ql/src/test/queries/clientpositive/stats11.q (revision 1370661)
+++ ql/src/test/queries/clientpositive/stats11.q (working copy)
@@ -6,10 +6,17 @@
load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin;
CREATE TABLE srcbucket_mapjoin_part (key int, value string) partitioned by (ds string) CLUSTERED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE;
+explain
load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08');
+load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08');
+
+desc formatted srcbucket_mapjoin_part partition(ds='2008-04-08');
load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08');
+desc formatted srcbucket_mapjoin_part partition(ds='2008-04-08');
load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08');
+desc formatted srcbucket_mapjoin_part partition(ds='2008-04-08');
load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08');
+desc formatted srcbucket_mapjoin_part partition(ds='2008-04-08');
CREATE TABLE srcbucket_mapjoin_part_2 (key int, value string) partitioned by (ds string) CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE;
load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08');
Index: ql/src/test/queries/clientpositive/stats3.q
===================================================================
--- ql/src/test/queries/clientpositive/stats3.q (revision 1370661)
+++ ql/src/test/queries/clientpositive/stats3.q (working copy)
@@ -4,8 +4,13 @@
drop table hive_test_dst;
create table hive_test_src ( col1 string ) stored as textfile ;
+explain extended
load data local inpath '../data/files/test.dat' overwrite into table hive_test_src ;
+load data local inpath '../data/files/test.dat' overwrite into table hive_test_src ;
+
+desc formatted hive_test_src;
+
create table hive_test_dst ( col1 string ) partitioned by ( pcol1 string , pcol2 string) stored as sequencefile;
insert overwrite table hive_test_dst partition ( pcol1='test_part', pCol2='test_Part') select col1 from hive_test_src ;
select * from hive_test_dst where pcol1='test_part' and pcol2='test_Part';
@@ -21,7 +26,7 @@
select * from hive_test_dst where pcol1='test_part' and pcol2='test_part';
select * from hive_test_dst where pcol1='test_Part';
-describe extended hive_test_dst;
+describe formatted hive_test_dst;
drop table hive_test_src;
drop table hive_test_dst;
Index: ql/src/test/queries/clientpositive/stats13.q
===================================================================
--- ql/src/test/queries/clientpositive/stats13.q (revision 1370661)
+++ ql/src/test/queries/clientpositive/stats13.q (working copy)
@@ -11,12 +11,12 @@
analyze table analyze_srcpart PARTITION(ds='2008-04-08',hr=11) compute statistics;
-desc extended analyze_srcpart;
-desc extended analyze_srcpart partition (ds='2008-04-08', hr=11);
-desc extended analyze_srcpart partition (ds='2008-04-08', hr=12);
-desc extended analyze_srcpart partition (ds='2008-04-09', hr=11);
-desc extended analyze_srcpart partition (ds='2008-04-09', hr=12);
+desc formatted analyze_srcpart;
+desc formatted analyze_srcpart partition (ds='2008-04-08', hr=11);
+desc formatted analyze_srcpart partition (ds='2008-04-08', hr=12);
+desc formatted analyze_srcpart partition (ds='2008-04-09', hr=11);
+desc formatted analyze_srcpart partition (ds='2008-04-09', hr=12);
create table analyze_srcpart2 like analyze_srcpart;
-desc extended analyze_srcpart2;
+desc formatted analyze_srcpart2;
Index: ql/src/test/queries/clientpositive/stats5.q
===================================================================
--- ql/src/test/queries/clientpositive/stats5.q (revision 1370661)
+++ ql/src/test/queries/clientpositive/stats5.q (working copy)
@@ -7,4 +7,4 @@
analyze table analyze_src compute statistics;
-describe extended analyze_src;
+describe formatted analyze_src;
Index: ql/src/test/queries/clientpositive/stats15.q
===================================================================
--- ql/src/test/queries/clientpositive/stats15.q (revision 1370661)
+++ ql/src/test/queries/clientpositive/stats15.q (working copy)
@@ -24,4 +24,4 @@
desc formatted stats_part;
drop table stats_src;
-drop table stats_part;
\ No newline at end of file
+drop table stats_part;
Index: ql/src/test/queries/clientpositive/stats7.q
===================================================================
--- ql/src/test/queries/clientpositive/stats7.q (revision 1370661)
+++ ql/src/test/queries/clientpositive/stats7.q (working copy)
@@ -10,7 +10,7 @@
analyze table analyze_srcpart PARTITION(ds='2008-04-08',hr) compute statistics;
-describe extended analyze_srcpart PARTITION(ds='2008-04-08',hr=11);
-describe extended analyze_srcpart PARTITION(ds='2008-04-08',hr=12);
+describe formatted analyze_srcpart PARTITION(ds='2008-04-08',hr=11);
+describe formatted analyze_srcpart PARTITION(ds='2008-04-08',hr=12);
-describe extended analyze_srcpart;
+describe formatted analyze_srcpart;
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/StatsTask.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/StatsTask.java (revision 1370661)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/StatsTask.java (working copy)
@@ -329,6 +329,15 @@
work.getAggKey(), atomic);
statsAggregator.cleanUp(work.getAggKey());
}
+ // The collectable stats for the aggregator needs to be cleared.
+ // For eg. if a file is being loaded, the old number of rows are not valid
+ else if (work.isClearAggregatorStats()) {
+ for (String statType : collectableStats) {
+ if (parameters.containsKey(statType)) {
+ tblStats.setStat(statType, 0L);
+ }
+ }
+ }
} else {
// Partitioned table:
// Need to get the old stats of the partition
@@ -368,7 +377,16 @@
parameters, partitionID, atomic);
} else {
for (String statType : collectableStats) {
- newPartStats.setStat(statType, currentValues.get(statType));
+ // The collectable stats for the aggregator needs to be cleared.
+ // For eg. if a file is being loaded, the old number of rows are not valid
+ if (work.isClearAggregatorStats()) {
+ if (parameters.containsKey(statType)) {
+ newPartStats.setStat(statType, 0L);
+ }
+ }
+ else {
+ newPartStats.setStat(statType, currentValues.get(statType));
+ }
}
}
Index: ql/src/java/org/apache/hadoop/hive/ql/plan/StatsWork.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/plan/StatsWork.java (revision 1370661)
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/StatsWork.java (working copy)
@@ -30,12 +30,19 @@
public class StatsWork implements Serializable {
private static final long serialVersionUID = 1L;
- private tableSpec tableSpecs; // source table spec -- for TableScanOperator
- private LoadTableDesc loadTableDesc; // same as MoveWork.loadTableDesc -- for FileSinkOperator
- private LoadFileDesc loadFileDesc; // same as MoveWork.loadFileDesc -- for FileSinkOperator
- private String aggKey; // aggregation key prefix
- private boolean statsReliable; // are stats completely reliable
+ private tableSpec tableSpecs; // source table spec -- for TableScanOperator
+ private LoadTableDesc loadTableDesc; // same as MoveWork.loadTableDesc -- for FileSinkOperator
+ private LoadFileDesc loadFileDesc; // same as MoveWork.loadFileDesc -- for FileSinkOperator
+ private String aggKey; // aggregation key prefix
+ private boolean statsReliable; // are stats completely reliable
+ // If stats aggregator is not present, clear the current aggregator stats.
+ // For eg. if a merge is being performed, stats already collected by aggregator (numrows etc.)
+ // are still valid. However, if a load file is being performed, the old stats collected by
+ // aggregator are not valid. It might be a good idea to clear them instead of leaving wrong
+ // and old stats.
+ private boolean clearAggregatorStats = false;
+
private boolean noStatsAggregator = false;
public StatsWork() {
@@ -93,4 +100,12 @@
public void setStatsReliable(boolean statsReliable) {
this.statsReliable = statsReliable;
}
+
+ public boolean isClearAggregatorStats() {
+ return clearAggregatorStats;
+ }
+
+ public void setClearAggregatorStats(boolean clearAggregatorStats) {
+ this.clearAggregatorStats = clearAggregatorStats;
+ }
}
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java (revision 1370661)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java (working copy)
@@ -44,7 +44,7 @@
import org.apache.hadoop.hive.ql.plan.CopyWork;
import org.apache.hadoop.hive.ql.plan.LoadTableDesc;
import org.apache.hadoop.hive.ql.plan.MoveWork;
-import org.apache.hadoop.util.Shell;
+import org.apache.hadoop.hive.ql.plan.StatsWork;
/**
* LoadSemanticAnalyzer.
@@ -259,30 +259,49 @@
LoadTableDesc loadTableWork = new LoadTableDesc(fromURI.toString(),
loadTmpPath, Utilities.getTableDesc(ts.tableHandle), partSpec, isOverWrite);
+ Task<? extends Serializable> childTask = TaskFactory.get(new MoveWork(getInputs(),
+ getOutputs(), loadTableWork, null, true), conf);
if (rTask != null) {
- rTask.addDependentTask(TaskFactory.get(new MoveWork(getInputs(),
- getOutputs(), loadTableWork, null, true), conf));
+ rTask.addDependentTask(childTask);
} else {
- rTask = TaskFactory.get(new MoveWork(getInputs(), getOutputs(),
- loadTableWork, null, true), conf);
+ rTask = childTask;
}
rootTasks.add(rTask);
+
+ // The user asked for stats to be collected.
+ // Some stats like number of rows require a scan of the data
+ // However, some other stats, like number of files, do not require a complete scan
+ // Update the stats which do not require a complete scan.
+ Task<? extends Serializable> statTask = null;
+ if (conf.getBoolVar(HiveConf.ConfVars.HIVESTATSAUTOGATHER)) {
+ StatsWork statDesc = new StatsWork(loadTableWork);
+ statDesc.setNoStatsAggregator(true);
+ statDesc.setClearAggregatorStats(true);
+ statDesc.setStatsReliable(conf.getBoolVar(HiveConf.ConfVars.HIVE_STATS_RELIABLE));
+ statTask = TaskFactory.get(statDesc, conf);
+ }
+
+ // HIVE-3334 has been filed for load file with index auto update
if (HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVEINDEXAUTOUPDATE)) {
IndexUpdater indexUpdater = new IndexUpdater(loadTableWork, getInputs(), conf);
try {
List<Task<? extends Serializable>> indexUpdateTasks = indexUpdater.generateUpdateTasks();
+
for (Task<? extends Serializable> updateTask : indexUpdateTasks) {
- //LOAD DATA will either have a copy & move or just a move, we always want the update to be dependent on the move
- if (rTask.getChildren() == null || rTask.getChildren().size() == 0) {
- rTask.addDependentTask(updateTask);
- } else {
- ((Task<? extends Serializable>)rTask.getChildren().get(0)).addDependentTask(updateTask);
+ //LOAD DATA will either have a copy & move or just a move,
+ // we always want the update to be dependent on the move
+ childTask.addDependentTask(updateTask);
+ if (statTask != null) {
+ updateTask.addDependentTask(statTask);
}
}
} catch (HiveException e) {
console.printInfo("WARNING: could not auto-update stale indexes, indexes are not out of sync");
}
}
+ else if (statTask != null) {
+ childTask.addDependentTask(statTask);
+ }
}
}