Index: metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
===================================================================
--- metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java (revision 1583107)
+++ metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java (working copy)
@@ -957,11 +957,16 @@
String partString = "";
String partStringSep = "";
+ String partTypesString = "";
+ String partTypesStringSep = "";
for (FieldSchema partKey : partitionKeys) {
partString = partString.concat(partStringSep);
partString = partString.concat(partKey.getName());
+ partTypesString = partTypesString.concat(partTypesStringSep);
+ partTypesString = partTypesString.concat(partKey.getType());
if (partStringSep.length() == 0) {
partStringSep = "/";
+ partTypesStringSep = ":";
}
}
if (partString.length() > 0) {
@@ -969,6 +974,10 @@
.setProperty(
org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_PARTITION_COLUMNS,
partString);
+ schema
+ .setProperty(
+ org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_PARTITION_COLUMN_TYPES,
+ partTypesString);
}
if (parameters != null) {
Index: metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/hive_metastoreConstants.java
===================================================================
--- metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/hive_metastoreConstants.java (revision 1583107)
+++ metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/hive_metastoreConstants.java (working copy)
@@ -66,6 +66,8 @@
public static final String META_TABLE_SERDE = "serde";
public static final String META_TABLE_PARTITION_COLUMNS = "partition_columns";
+
+ public static final String META_TABLE_PARTITION_COLUMN_TYPES = "partition_columns.types";
public static final String FILE_INPUT_FORMAT = "file.inputformat";
Index: serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaHiveVarcharObjectInspector.java
===================================================================
--- serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaHiveVarcharObjectInspector.java (revision 1583107)
+++ serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaHiveVarcharObjectInspector.java (working copy)
@@ -68,23 +68,18 @@
@Override
public Object set(Object o, HiveVarchar value) {
- HiveVarchar setValue = (HiveVarchar)o;
if (BaseCharUtils.doesPrimitiveMatchTypeParams(
value, (VarcharTypeInfo)typeInfo)) {
- setValue.setValue(value);
+ return value;
} else {
// Otherwise value may be too long, convert to appropriate value based on params
- setValue.setValue(value, getMaxLength());
+ return new HiveVarchar(value, getMaxLength());
}
-
- return setValue;
}
@Override
public Object set(Object o, String value) {
- HiveVarchar convertedValue = (HiveVarchar)o;
- convertedValue.setValue(value, getMaxLength());
- return convertedValue;
+ return new HiveVarchar(value, getMaxLength());
}
@Override
Index: ql/src/test/results/clientpositive/groupby_ppr.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby_ppr.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/groupby_ppr.q.out (working copy)
@@ -135,6 +135,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -153,6 +154,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -179,6 +181,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -197,6 +200,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/input_part7.q.out
===================================================================
--- ql/src/test/results/clientpositive/input_part7.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/input_part7.q.out (working copy)
@@ -187,6 +187,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -205,6 +206,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -231,6 +233,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -249,6 +252,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/bucketmapjoin5.q.out
===================================================================
--- ql/src/test/results/clientpositive/bucketmapjoin5.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/bucketmapjoin5.q.out (working copy)
@@ -320,6 +320,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
serialization.format 1
@@ -339,6 +340,7 @@
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part
partition_columns ds
+ partition_columns.types string
serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -365,6 +367,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
serialization.format 1
@@ -384,6 +387,7 @@
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part
partition_columns ds
+ partition_columns.types string
serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -902,6 +906,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
@@ -921,6 +926,7 @@
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part_2
partition_columns ds
+ partition_columns.types string
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -947,6 +953,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
@@ -966,6 +973,7 @@
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part_2
partition_columns ds
+ partition_columns.types string
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/pcr.q.out
===================================================================
--- ql/src/test/results/clientpositive/pcr.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/pcr.q.out (working copy)
@@ -150,6 +150,7 @@
numFiles 1
numRows 20
partition_columns ds
+ partition_columns.types string
rawDataSize 160
serialization.ddl struct pcr_t1 { i32 key, string value}
serialization.format 1
@@ -168,6 +169,7 @@
#### A masked pattern was here ####
name default.pcr_t1
partition_columns ds
+ partition_columns.types string
serialization.ddl struct pcr_t1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -193,6 +195,7 @@
numFiles 1
numRows 20
partition_columns ds
+ partition_columns.types string
rawDataSize 160
serialization.ddl struct pcr_t1 { i32 key, string value}
serialization.format 1
@@ -211,6 +214,7 @@
#### A masked pattern was here ####
name default.pcr_t1
partition_columns ds
+ partition_columns.types string
serialization.ddl struct pcr_t1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -370,6 +374,7 @@
numFiles 1
numRows 20
partition_columns ds
+ partition_columns.types string
rawDataSize 160
serialization.ddl struct pcr_t1 { i32 key, string value}
serialization.format 1
@@ -388,6 +393,7 @@
#### A masked pattern was here ####
name default.pcr_t1
partition_columns ds
+ partition_columns.types string
serialization.ddl struct pcr_t1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -413,6 +419,7 @@
numFiles 1
numRows 20
partition_columns ds
+ partition_columns.types string
rawDataSize 160
serialization.ddl struct pcr_t1 { i32 key, string value}
serialization.format 1
@@ -431,6 +438,7 @@
#### A masked pattern was here ####
name default.pcr_t1
partition_columns ds
+ partition_columns.types string
serialization.ddl struct pcr_t1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -456,6 +464,7 @@
numFiles 1
numRows 20
partition_columns ds
+ partition_columns.types string
rawDataSize 160
serialization.ddl struct pcr_t1 { i32 key, string value}
serialization.format 1
@@ -474,6 +483,7 @@
#### A masked pattern was here ####
name default.pcr_t1
partition_columns ds
+ partition_columns.types string
serialization.ddl struct pcr_t1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -682,6 +692,7 @@
numFiles 1
numRows 20
partition_columns ds
+ partition_columns.types string
rawDataSize 160
serialization.ddl struct pcr_t1 { i32 key, string value}
serialization.format 1
@@ -700,6 +711,7 @@
#### A masked pattern was here ####
name default.pcr_t1
partition_columns ds
+ partition_columns.types string
serialization.ddl struct pcr_t1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -725,6 +737,7 @@
numFiles 1
numRows 20
partition_columns ds
+ partition_columns.types string
rawDataSize 160
serialization.ddl struct pcr_t1 { i32 key, string value}
serialization.format 1
@@ -743,6 +756,7 @@
#### A masked pattern was here ####
name default.pcr_t1
partition_columns ds
+ partition_columns.types string
serialization.ddl struct pcr_t1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -920,6 +934,7 @@
numFiles 1
numRows 20
partition_columns ds
+ partition_columns.types string
rawDataSize 160
serialization.ddl struct pcr_t1 { i32 key, string value}
serialization.format 1
@@ -938,6 +953,7 @@
#### A masked pattern was here ####
name default.pcr_t1
partition_columns ds
+ partition_columns.types string
serialization.ddl struct pcr_t1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -963,6 +979,7 @@
numFiles 1
numRows 20
partition_columns ds
+ partition_columns.types string
rawDataSize 160
serialization.ddl struct pcr_t1 { i32 key, string value}
serialization.format 1
@@ -981,6 +998,7 @@
#### A masked pattern was here ####
name default.pcr_t1
partition_columns ds
+ partition_columns.types string
serialization.ddl struct pcr_t1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1160,6 +1178,7 @@
numFiles 1
numRows 20
partition_columns ds
+ partition_columns.types string
rawDataSize 160
serialization.ddl struct pcr_t1 { i32 key, string value}
serialization.format 1
@@ -1178,6 +1197,7 @@
#### A masked pattern was here ####
name default.pcr_t1
partition_columns ds
+ partition_columns.types string
serialization.ddl struct pcr_t1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1203,6 +1223,7 @@
numFiles 1
numRows 20
partition_columns ds
+ partition_columns.types string
rawDataSize 160
serialization.ddl struct pcr_t1 { i32 key, string value}
serialization.format 1
@@ -1221,6 +1242,7 @@
#### A masked pattern was here ####
name default.pcr_t1
partition_columns ds
+ partition_columns.types string
serialization.ddl struct pcr_t1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1246,6 +1268,7 @@
numFiles 1
numRows 20
partition_columns ds
+ partition_columns.types string
rawDataSize 160
serialization.ddl struct pcr_t1 { i32 key, string value}
serialization.format 1
@@ -1264,6 +1287,7 @@
#### A masked pattern was here ####
name default.pcr_t1
partition_columns ds
+ partition_columns.types string
serialization.ddl struct pcr_t1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1454,6 +1478,7 @@
numFiles 1
numRows 20
partition_columns ds
+ partition_columns.types string
rawDataSize 160
serialization.ddl struct pcr_t1 { i32 key, string value}
serialization.format 1
@@ -1472,6 +1497,7 @@
#### A masked pattern was here ####
name default.pcr_t1
partition_columns ds
+ partition_columns.types string
serialization.ddl struct pcr_t1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1497,6 +1523,7 @@
numFiles 1
numRows 20
partition_columns ds
+ partition_columns.types string
rawDataSize 160
serialization.ddl struct pcr_t1 { i32 key, string value}
serialization.format 1
@@ -1515,6 +1542,7 @@
#### A masked pattern was here ####
name default.pcr_t1
partition_columns ds
+ partition_columns.types string
serialization.ddl struct pcr_t1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1540,6 +1568,7 @@
numFiles 1
numRows 20
partition_columns ds
+ partition_columns.types string
rawDataSize 160
serialization.ddl struct pcr_t1 { i32 key, string value}
serialization.format 1
@@ -1558,6 +1587,7 @@
#### A masked pattern was here ####
name default.pcr_t1
partition_columns ds
+ partition_columns.types string
serialization.ddl struct pcr_t1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1748,6 +1778,7 @@
numFiles 1
numRows 20
partition_columns ds
+ partition_columns.types string
rawDataSize 160
serialization.ddl struct pcr_t1 { i32 key, string value}
serialization.format 1
@@ -1766,6 +1797,7 @@
#### A masked pattern was here ####
name default.pcr_t1
partition_columns ds
+ partition_columns.types string
serialization.ddl struct pcr_t1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1791,6 +1823,7 @@
numFiles 1
numRows 20
partition_columns ds
+ partition_columns.types string
rawDataSize 160
serialization.ddl struct pcr_t1 { i32 key, string value}
serialization.format 1
@@ -1809,6 +1842,7 @@
#### A masked pattern was here ####
name default.pcr_t1
partition_columns ds
+ partition_columns.types string
serialization.ddl struct pcr_t1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1957,6 +1991,7 @@
numFiles 1
numRows 20
partition_columns ds
+ partition_columns.types string
rawDataSize 160
serialization.ddl struct pcr_t1 { i32 key, string value}
serialization.format 1
@@ -1975,6 +2010,7 @@
#### A masked pattern was here ####
name default.pcr_t1
partition_columns ds
+ partition_columns.types string
serialization.ddl struct pcr_t1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -2000,6 +2036,7 @@
numFiles 1
numRows 20
partition_columns ds
+ partition_columns.types string
rawDataSize 160
serialization.ddl struct pcr_t1 { i32 key, string value}
serialization.format 1
@@ -2018,6 +2055,7 @@
#### A masked pattern was here ####
name default.pcr_t1
partition_columns ds
+ partition_columns.types string
serialization.ddl struct pcr_t1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -2206,6 +2244,7 @@
numFiles 1
numRows 20
partition_columns ds
+ partition_columns.types string
rawDataSize 160
serialization.ddl struct pcr_t1 { i32 key, string value}
serialization.format 1
@@ -2224,6 +2263,7 @@
#### A masked pattern was here ####
name default.pcr_t1
partition_columns ds
+ partition_columns.types string
serialization.ddl struct pcr_t1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -2249,6 +2289,7 @@
numFiles 1
numRows 20
partition_columns ds
+ partition_columns.types string
rawDataSize 160
serialization.ddl struct pcr_t1 { i32 key, string value}
serialization.format 1
@@ -2267,6 +2308,7 @@
#### A masked pattern was here ####
name default.pcr_t1
partition_columns ds
+ partition_columns.types string
serialization.ddl struct pcr_t1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -2292,6 +2334,7 @@
numFiles 1
numRows 20
partition_columns ds
+ partition_columns.types string
rawDataSize 160
serialization.ddl struct pcr_t1 { i32 key, string value}
serialization.format 1
@@ -2310,6 +2353,7 @@
#### A masked pattern was here ####
name default.pcr_t1
partition_columns ds
+ partition_columns.types string
serialization.ddl struct pcr_t1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -2541,6 +2585,7 @@
numFiles 1
numRows 20
partition_columns ds
+ partition_columns.types string
rawDataSize 160
serialization.ddl struct pcr_t1 { i32 key, string value}
serialization.format 1
@@ -2559,6 +2604,7 @@
#### A masked pattern was here ####
name default.pcr_t1
partition_columns ds
+ partition_columns.types string
serialization.ddl struct pcr_t1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -2584,6 +2630,7 @@
numFiles 1
numRows 20
partition_columns ds
+ partition_columns.types string
rawDataSize 160
serialization.ddl struct pcr_t1 { i32 key, string value}
serialization.format 1
@@ -2602,6 +2649,7 @@
#### A masked pattern was here ####
name default.pcr_t1
partition_columns ds
+ partition_columns.types string
serialization.ddl struct pcr_t1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -2774,6 +2822,7 @@
numFiles 1
numRows 20
partition_columns ds
+ partition_columns.types string
rawDataSize 160
serialization.ddl struct pcr_t1 { i32 key, string value}
serialization.format 1
@@ -2792,6 +2841,7 @@
#### A masked pattern was here ####
name default.pcr_t1
partition_columns ds
+ partition_columns.types string
serialization.ddl struct pcr_t1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -3066,6 +3116,7 @@
numFiles 1
numRows 20
partition_columns ds
+ partition_columns.types string
rawDataSize 160
serialization.ddl struct pcr_t1 { i32 key, string value}
serialization.format 1
@@ -3084,6 +3135,7 @@
#### A masked pattern was here ####
name default.pcr_t1
partition_columns ds
+ partition_columns.types string
serialization.ddl struct pcr_t1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -3109,6 +3161,7 @@
numFiles 1
numRows 20
partition_columns ds
+ partition_columns.types string
rawDataSize 160
serialization.ddl struct pcr_t1 { i32 key, string value}
serialization.format 1
@@ -3127,6 +3180,7 @@
#### A masked pattern was here ####
name default.pcr_t1
partition_columns ds
+ partition_columns.types string
serialization.ddl struct pcr_t1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -3424,6 +3478,7 @@
numFiles 1
numRows 20
partition_columns ds
+ partition_columns.types string
rawDataSize 160
serialization.ddl struct pcr_t1 { i32 key, string value}
serialization.format 1
@@ -3442,6 +3497,7 @@
#### A masked pattern was here ####
name default.pcr_t1
partition_columns ds
+ partition_columns.types string
serialization.ddl struct pcr_t1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -3467,6 +3523,7 @@
numFiles 1
numRows 20
partition_columns ds
+ partition_columns.types string
rawDataSize 160
serialization.ddl struct pcr_t1 { i32 key, string value}
serialization.format 1
@@ -3485,6 +3542,7 @@
#### A masked pattern was here ####
name default.pcr_t1
partition_columns ds
+ partition_columns.types string
serialization.ddl struct pcr_t1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -3510,6 +3568,7 @@
numFiles 1
numRows 20
partition_columns ds
+ partition_columns.types string
rawDataSize 160
serialization.ddl struct pcr_t1 { i32 key, string value}
serialization.format 1
@@ -3528,6 +3587,7 @@
#### A masked pattern was here ####
name default.pcr_t1
partition_columns ds
+ partition_columns.types string
serialization.ddl struct pcr_t1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -3553,6 +3613,7 @@
numFiles 1
numRows 20
partition_columns ds
+ partition_columns.types string
rawDataSize 160
serialization.ddl struct pcr_t1 { i32 key, string value}
serialization.format 1
@@ -3571,6 +3632,7 @@
#### A masked pattern was here ####
name default.pcr_t1
partition_columns ds
+ partition_columns.types string
serialization.ddl struct pcr_t1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -3791,6 +3853,7 @@
numFiles 1
numRows 20
partition_columns ds
+ partition_columns.types string
rawDataSize 160
serialization.ddl struct pcr_t1 { i32 key, string value}
serialization.format 1
@@ -3809,6 +3872,7 @@
#### A masked pattern was here ####
name default.pcr_t1
partition_columns ds
+ partition_columns.types string
serialization.ddl struct pcr_t1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -3834,6 +3898,7 @@
numFiles 1
numRows 20
partition_columns ds
+ partition_columns.types string
rawDataSize 160
serialization.ddl struct pcr_t1 { i32 key, string value}
serialization.format 1
@@ -3852,6 +3917,7 @@
#### A masked pattern was here ####
name default.pcr_t1
partition_columns ds
+ partition_columns.types string
serialization.ddl struct pcr_t1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -3877,6 +3943,7 @@
numFiles 1
numRows 20
partition_columns ds
+ partition_columns.types string
rawDataSize 160
serialization.ddl struct pcr_t1 { i32 key, string value}
serialization.format 1
@@ -3895,6 +3962,7 @@
#### A masked pattern was here ####
name default.pcr_t1
partition_columns ds
+ partition_columns.types string
serialization.ddl struct pcr_t1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -4196,6 +4264,7 @@
numFiles 1
numRows 20
partition_columns ds
+ partition_columns.types string
rawDataSize 160
serialization.ddl struct pcr_t1 { i32 key, string value}
serialization.format 1
@@ -4214,6 +4283,7 @@
#### A masked pattern was here ####
name default.pcr_t1
partition_columns ds
+ partition_columns.types string
serialization.ddl struct pcr_t1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -4809,6 +4879,7 @@
numFiles 1
numRows 20
partition_columns ds
+ partition_columns.types string
rawDataSize 160
serialization.ddl struct pcr_t1 { i32 key, string value}
serialization.format 1
@@ -4827,6 +4898,7 @@
#### A masked pattern was here ####
name default.pcr_t1
partition_columns ds
+ partition_columns.types string
serialization.ddl struct pcr_t1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -5397,6 +5469,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -5415,6 +5488,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -5605,6 +5679,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -5623,6 +5698,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -5649,6 +5725,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -5667,6 +5744,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -5851,6 +5929,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -5869,6 +5948,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -5895,6 +5975,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -5913,6 +5994,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -6185,7 +6267,7 @@
POSTHOOK: Lineage: pcr_t3.value SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:value, type:string, comment:null), ]
POSTHOOK: Lineage: pcr_t3.value SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:value, type:string, comment:null), ]
PREHOOK: query: -- the condition is 'true' for all the 3 partitions (ds=3,5,7):
-select key, value, ds from pcr_foo where (ds % 2 == 1)
+select key, value, ds from pcr_foo where (ds % 2.0 == 1)
PREHOOK: type: QUERY
PREHOOK: Input: default@pcr_foo
PREHOOK: Input: default@pcr_foo@ds=3
@@ -6193,7 +6275,7 @@
PREHOOK: Input: default@pcr_foo@ds=7
#### A masked pattern was here ####
POSTHOOK: query: -- the condition is 'true' for all the 3 partitions (ds=3,5,7):
-select key, value, ds from pcr_foo where (ds % 2 == 1)
+select key, value, ds from pcr_foo where (ds % 2.0 == 1)
POSTHOOK: type: QUERY
POSTHOOK: Input: default@pcr_foo
POSTHOOK: Input: default@pcr_foo@ds=3
Index: ql/src/test/results/clientpositive/join33.q.out
===================================================================
--- ql/src/test/results/clientpositive/join33.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/join33.q.out (working copy)
@@ -189,6 +189,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -207,6 +208,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -335,6 +337,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -353,6 +356,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/input_part2.q.out
===================================================================
--- ql/src/test/results/clientpositive/input_part2.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/input_part2.q.out (working copy)
@@ -240,6 +240,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -258,6 +259,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -284,6 +286,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -302,6 +305,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/alter_partition_coltype.q.out
===================================================================
--- ql/src/test/results/clientpositive/alter_partition_coltype.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/alter_partition_coltype.q.out (working copy)
@@ -51,1358 +51,100 @@
POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
25
PREHOOK: query: -- alter partition key column data type for dt column.
-alter table alter_coltype partition column (dt int)
-PREHOOK: type: ALTERTABLE_PARTCOLTYPE
-PREHOOK: Input: default@alter_coltype
-POSTHOOK: query: -- alter partition key column data type for dt column.
-alter table alter_coltype partition column (dt int)
-POSTHOOK: type: ALTERTABLE_PARTCOLTYPE
-POSTHOOK: Input: default@alter_coltype
-POSTHOOK: Output: default@alter_coltype
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: -- load a new partition using new data type.
-insert overwrite table alter_coltype partition(dt=10, ts='3.0') select * from src1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src1
-PREHOOK: Output: default@alter_coltype@dt=10/ts=3.0
-POSTHOOK: query: -- load a new partition using new data type.
-insert overwrite table alter_coltype partition(dt=10, ts='3.0') select * from src1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src1
-POSTHOOK: Output: default@alter_coltype@dt=10/ts=3.0
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=10,ts=3.0).key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=10,ts=3.0).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: -- make sure the partition predicate still works.
-select count(*) from alter_coltype where dt = '100x'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@alter_coltype
-PREHOOK: Input: default@alter_coltype@dt=100x/ts=6%3A30pm
-#### A masked pattern was here ####
-POSTHOOK: query: -- make sure the partition predicate still works.
-select count(*) from alter_coltype where dt = '100x'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@alter_coltype
-POSTHOOK: Input: default@alter_coltype@dt=100x/ts=6%3A30pm
-#### A masked pattern was here ####
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=10,ts=3.0).key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=10,ts=3.0).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
-25
-PREHOOK: query: explain extended select count(*) from alter_coltype where dt = '100x'
-PREHOOK: type: QUERY
-POSTHOOK: query: explain extended select count(*) from alter_coltype where dt = '100x'
-POSTHOOK: type: QUERY
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=10,ts=3.0).key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=10,ts=3.0).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
-ABSTRACT SYNTAX TREE:
-
-TOK_QUERY
- TOK_FROM
- TOK_TABREF
- TOK_TABNAME
- alter_coltype
- TOK_INSERT
- TOK_DESTINATION
- TOK_DIR
- TOK_TMP_FILE
- TOK_SELECT
- TOK_SELEXPR
- TOK_FUNCTIONSTAR
- count
- TOK_WHERE
- =
- TOK_TABLE_OR_COL
- dt
- '100x'
+-- alter table alter_coltype partition column (dt int);
+-- load a new partition using new data type.
+-- insert overwrite table alter_coltype partition(dt=10, ts='3.0') select * from src1;
-STAGE DEPENDENCIES:
- Stage-1 is a root stage
- Stage-0 is a root stage
+-- make sure the partition predicate still works.
+-- select count(*) from alter_coltype where dt = '100x';
+-- explain extended select count(*) from alter_coltype where dt = '100x';
-STAGE PLANS:
- Stage: Stage-1
- Map Reduce
- Map Operator Tree:
- TableScan
- alias: alter_coltype
- Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: COMPLETE
- GatherStats: false
- Select Operator
- Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: COMPLETE
- Group By Operator
- aggregations: count()
- mode: hash
- outputColumnNames: _col0
- Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
- Reduce Output Operator
- sort order:
- Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
- tag: -1
- value expressions: _col0 (type: bigint)
- Path -> Alias:
-#### A masked pattern was here ####
- Path -> Partition:
-#### A masked pattern was here ####
- Partition
- base file name: ts=6%3A30pm
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- partition values:
- dt 100x
- ts 6:30pm
- properties:
- COLUMN_STATS_ACCURATE true
- bucket_count -1
- columns key,value
- columns.comments
- columns.types string:string
-#### A masked pattern was here ####
- name default.alter_coltype
- numFiles 1
- numRows 25
- partition_columns dt/ts
- rawDataSize 191
- serialization.ddl struct alter_coltype { string key, string value}
- serialization.format 1
- serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- totalSize 216
-#### A masked pattern was here ####
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- properties:
- bucket_count -1
- columns key,value
- columns.comments
- columns.types string:string
-#### A masked pattern was here ####
- name default.alter_coltype
- partition_columns dt/ts
- serialization.ddl struct alter_coltype { string key, string value}
- serialization.format 1
- serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.alter_coltype
- name: default.alter_coltype
- Truncated Path -> Alias:
- /alter_coltype/dt=100x/ts=6%3A30pm [alter_coltype]
- Needs Tagging: false
- Reduce Operator Tree:
- Group By Operator
- aggregations: count(VALUE._col0)
- mode: mergepartial
- outputColumnNames: _col0
- Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
- Select Operator
- expressions: _col0 (type: bigint)
- outputColumnNames: _col0
- Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
- File Output Operator
- compressed: false
- GlobalTableId: 0
-#### A masked pattern was here ####
- NumFilesPerFileSink: 1
- Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
-#### A masked pattern was here ####
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- properties:
- columns _col0
- columns.types bigint
- escape.delim \
- hive.serialization.extend.nesting.levels true
- serialization.format 1
- serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- TotalFiles: 1
- GatherStats: false
- MultiFileSpray: false
+-- select count(*) from alter_coltype where dt = '100';
- Stage: Stage-0
- Fetch Operator
- limit: -1
+-- alter partition key column data type for ts column.
+-- alter table alter_coltype partition column (ts double);
-PREHOOK: query: select count(*) from alter_coltype where dt = '100'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@alter_coltype
-#### A masked pattern was here ####
-POSTHOOK: query: select count(*) from alter_coltype where dt = '100'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@alter_coltype
-#### A masked pattern was here ####
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=10,ts=3.0).key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=10,ts=3.0).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
-0
-PREHOOK: query: -- alter partition key column data type for ts column.
-alter table alter_coltype partition column (ts double)
-PREHOOK: type: ALTERTABLE_PARTCOLTYPE
-PREHOOK: Input: default@alter_coltype
-POSTHOOK: query: -- alter partition key column data type for ts column.
-alter table alter_coltype partition column (ts double)
-POSTHOOK: type: ALTERTABLE_PARTCOLTYPE
-POSTHOOK: Input: default@alter_coltype
-POSTHOOK: Output: default@alter_coltype
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=10,ts=3.0).key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=10,ts=3.0).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: alter table alter_coltype partition column (dt string)
-PREHOOK: type: ALTERTABLE_PARTCOLTYPE
-PREHOOK: Input: default@alter_coltype
-POSTHOOK: query: alter table alter_coltype partition column (dt string)
-POSTHOOK: type: ALTERTABLE_PARTCOLTYPE
-POSTHOOK: Input: default@alter_coltype
-POSTHOOK: Output: default@alter_coltype
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=10,ts=3.0).key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=10,ts=3.0).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: -- load a new partition using new data type.
-insert overwrite table alter_coltype partition(dt='100x', ts=3.0) select * from src1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src1
-PREHOOK: Output: default@alter_coltype@dt=100x/ts=3.0
-POSTHOOK: query: -- load a new partition using new data type.
-insert overwrite table alter_coltype partition(dt='100x', ts=3.0) select * from src1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src1
-POSTHOOK: Output: default@alter_coltype@dt=100x/ts=3.0
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=10,ts=3.0).key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=10,ts=3.0).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=3.0).key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=3.0).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: -- validate partition key column predicate can still work.
-select count(*) from alter_coltype where ts = '6:30pm'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@alter_coltype
-PREHOOK: Input: default@alter_coltype@dt=100x/ts=6%3A30pm
-#### A masked pattern was here ####
-POSTHOOK: query: -- validate partition key column predicate can still work.
-select count(*) from alter_coltype where ts = '6:30pm'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@alter_coltype
-POSTHOOK: Input: default@alter_coltype@dt=100x/ts=6%3A30pm
-#### A masked pattern was here ####
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=10,ts=3.0).key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=10,ts=3.0).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=3.0).key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=3.0).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
-25
-PREHOOK: query: explain extended select count(*) from alter_coltype where ts = '6:30pm'
-PREHOOK: type: QUERY
-POSTHOOK: query: explain extended select count(*) from alter_coltype where ts = '6:30pm'
-POSTHOOK: type: QUERY
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=10,ts=3.0).key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=10,ts=3.0).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=3.0).key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=3.0).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
-ABSTRACT SYNTAX TREE:
-
-TOK_QUERY
- TOK_FROM
- TOK_TABREF
- TOK_TABNAME
- alter_coltype
- TOK_INSERT
- TOK_DESTINATION
- TOK_DIR
- TOK_TMP_FILE
- TOK_SELECT
- TOK_SELEXPR
- TOK_FUNCTIONSTAR
- count
- TOK_WHERE
- =
- TOK_TABLE_OR_COL
- ts
- '6:30pm'
+-- alter table alter_coltype partition column (dt string);
+-- load a new partition using new data type.
+-- insert overwrite table alter_coltype partition(dt='100x', ts=3.0) select * from src1;
-STAGE DEPENDENCIES:
- Stage-1 is a root stage
- Stage-0 is a root stage
+-- validate partition key column predicate can still work.
+-- select count(*) from alter_coltype where ts = '6:30pm';
+-- explain extended select count(*) from alter_coltype where ts = '6:30pm';
-STAGE PLANS:
- Stage: Stage-1
- Map Reduce
- Map Operator Tree:
- TableScan
- alias: alter_coltype
- Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: COMPLETE
- GatherStats: false
- Select Operator
- Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: COMPLETE
- Group By Operator
- aggregations: count()
- mode: hash
- outputColumnNames: _col0
- Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
- Reduce Output Operator
- sort order:
- Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
- tag: -1
- value expressions: _col0 (type: bigint)
- Path -> Alias:
-#### A masked pattern was here ####
- Path -> Partition:
-#### A masked pattern was here ####
- Partition
- base file name: ts=6%3A30pm
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- partition values:
- dt 100x
- ts 6:30pm
- properties:
- COLUMN_STATS_ACCURATE true
- bucket_count -1
- columns key,value
- columns.comments
- columns.types string:string
-#### A masked pattern was here ####
- name default.alter_coltype
- numFiles 1
- numRows 25
- partition_columns dt/ts
- rawDataSize 191
- serialization.ddl struct alter_coltype { string key, string value}
- serialization.format 1
- serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- totalSize 216
-#### A masked pattern was here ####
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- properties:
- bucket_count -1
- columns key,value
- columns.comments
- columns.types string:string
-#### A masked pattern was here ####
- name default.alter_coltype
- partition_columns dt/ts
- serialization.ddl struct alter_coltype { string key, string value}
- serialization.format 1
- serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.alter_coltype
- name: default.alter_coltype
- Truncated Path -> Alias:
- /alter_coltype/dt=100x/ts=6%3A30pm [alter_coltype]
- Needs Tagging: false
- Reduce Operator Tree:
- Group By Operator
- aggregations: count(VALUE._col0)
- mode: mergepartial
- outputColumnNames: _col0
- Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
- Select Operator
- expressions: _col0 (type: bigint)
- outputColumnNames: _col0
- Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
- File Output Operator
- compressed: false
- GlobalTableId: 0
-#### A masked pattern was here ####
- NumFilesPerFileSink: 1
- Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
-#### A masked pattern was here ####
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- properties:
- columns _col0
- columns.types bigint
- escape.delim \
- hive.serialization.extend.nesting.levels true
- serialization.format 1
- serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- TotalFiles: 1
- GatherStats: false
- MultiFileSpray: false
-
- Stage: Stage-0
- Fetch Operator
- limit: -1
-
-PREHOOK: query: -- validate partition key column predicate on two different partition column data type
+-- validate partition key column predicate on two different partition column data type
-- can still work.
-select count(*) from alter_coltype where ts = 3.0 and dt=10
-PREHOOK: type: QUERY
-PREHOOK: Input: default@alter_coltype
-PREHOOK: Input: default@alter_coltype@dt=10/ts=3.0
-PREHOOK: Input: default@alter_coltype@dt=100x/ts=3.0
-PREHOOK: Input: default@alter_coltype@dt=100x/ts=6%3A30pm
-#### A masked pattern was here ####
-POSTHOOK: query: -- validate partition key column predicate on two different partition column data type
--- can still work.
-select count(*) from alter_coltype where ts = 3.0 and dt=10
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@alter_coltype
-POSTHOOK: Input: default@alter_coltype@dt=10/ts=3.0
-POSTHOOK: Input: default@alter_coltype@dt=100x/ts=3.0
-POSTHOOK: Input: default@alter_coltype@dt=100x/ts=6%3A30pm
-#### A masked pattern was here ####
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=10,ts=3.0).key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=10,ts=3.0).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=3.0).key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=3.0).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
-25
-PREHOOK: query: explain extended select count(*) from alter_coltype where ts = 3.0 and dt=10
-PREHOOK: type: QUERY
-POSTHOOK: query: explain extended select count(*) from alter_coltype where ts = 3.0 and dt=10
-POSTHOOK: type: QUERY
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=10,ts=3.0).key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=10,ts=3.0).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=3.0).key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=3.0).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
-ABSTRACT SYNTAX TREE:
-
-TOK_QUERY
- TOK_FROM
- TOK_TABREF
- TOK_TABNAME
- alter_coltype
- TOK_INSERT
- TOK_DESTINATION
- TOK_DIR
- TOK_TMP_FILE
- TOK_SELECT
- TOK_SELEXPR
- TOK_FUNCTIONSTAR
- count
- TOK_WHERE
- and
- =
- TOK_TABLE_OR_COL
- ts
- 3.0
- =
- TOK_TABLE_OR_COL
- dt
- 10
+-- select count(*) from alter_coltype where ts = 3.0 and dt=10;
+-- explain extended select count(*) from alter_coltype where ts = 3.0 and dt=10;
+-- query where multiple partition values (of different datatypes) are being selected
+-- select key, value, dt, ts from alter_coltype where dt is not null;
+-- explain extended select key, value, dt, ts from alter_coltype where dt is not null;
-STAGE DEPENDENCIES:
- Stage-1 is a root stage
- Stage-0 is a root stage
+-- select count(*) from alter_coltype where ts = 3.0;
-STAGE PLANS:
- Stage: Stage-1
- Map Reduce
- Map Operator Tree:
- TableScan
- alias: alter_coltype
- Statistics: Num rows: 75 Data size: 573 Basic stats: COMPLETE Column stats: COMPLETE
- GatherStats: false
- Filter Operator
- isSamplingPred: false
- predicate: ((ts = 3.0) and (dt = 10)) (type: boolean)
- Statistics: Num rows: 75 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE
- Select Operator
- Statistics: Num rows: 75 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE
- Group By Operator
- aggregations: count()
- mode: hash
- outputColumnNames: _col0
- Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
- Reduce Output Operator
- sort order:
- Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
- tag: -1
- value expressions: _col0 (type: bigint)
- Path -> Alias:
-#### A masked pattern was here ####
- Path -> Partition:
-#### A masked pattern was here ####
- Partition
- base file name: ts=3.0
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- partition values:
- dt 10
- ts 3.0
- properties:
- COLUMN_STATS_ACCURATE true
- bucket_count -1
- columns key,value
- columns.comments
- columns.types string:string
-#### A masked pattern was here ####
- name default.alter_coltype
- numFiles 1
- numRows 25
- partition_columns dt/ts
- rawDataSize 191
- serialization.ddl struct alter_coltype { string key, string value}
- serialization.format 1
- serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- totalSize 216
-#### A masked pattern was here ####
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- properties:
- bucket_count -1
- columns key,value
- columns.comments
- columns.types string:string
-#### A masked pattern was here ####
- name default.alter_coltype
- partition_columns dt/ts
- serialization.ddl struct alter_coltype { string key, string value}
- serialization.format 1
- serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.alter_coltype
- name: default.alter_coltype
-#### A masked pattern was here ####
- Partition
- base file name: ts=3.0
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- partition values:
- dt 100x
- ts 3.0
- properties:
- COLUMN_STATS_ACCURATE true
- bucket_count -1
- columns key,value
- columns.comments
- columns.types string:string
-#### A masked pattern was here ####
- name default.alter_coltype
- numFiles 1
- numRows 25
- partition_columns dt/ts
- rawDataSize 191
- serialization.ddl struct alter_coltype { string key, string value}
- serialization.format 1
- serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- totalSize 216
-#### A masked pattern was here ####
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- properties:
- bucket_count -1
- columns key,value
- columns.comments
- columns.types string:string
-#### A masked pattern was here ####
- name default.alter_coltype
- partition_columns dt/ts
- serialization.ddl struct alter_coltype { string key, string value}
- serialization.format 1
- serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.alter_coltype
- name: default.alter_coltype
-#### A masked pattern was here ####
- Partition
- base file name: ts=6%3A30pm
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- partition values:
- dt 100x
- ts 6:30pm
- properties:
- COLUMN_STATS_ACCURATE true
- bucket_count -1
- columns key,value
- columns.comments
- columns.types string:string
-#### A masked pattern was here ####
- name default.alter_coltype
- numFiles 1
- numRows 25
- partition_columns dt/ts
- rawDataSize 191
- serialization.ddl struct alter_coltype { string key, string value}
- serialization.format 1
- serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- totalSize 216
-#### A masked pattern was here ####
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- properties:
- bucket_count -1
- columns key,value
- columns.comments
- columns.types string:string
-#### A masked pattern was here ####
- name default.alter_coltype
- partition_columns dt/ts
- serialization.ddl struct alter_coltype { string key, string value}
- serialization.format 1
- serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.alter_coltype
- name: default.alter_coltype
- Truncated Path -> Alias:
- /alter_coltype/dt=10/ts=3.0 [alter_coltype]
- /alter_coltype/dt=100x/ts=3.0 [alter_coltype]
- /alter_coltype/dt=100x/ts=6%3A30pm [alter_coltype]
- Needs Tagging: false
- Reduce Operator Tree:
- Group By Operator
- aggregations: count(VALUE._col0)
- mode: mergepartial
- outputColumnNames: _col0
- Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
- Select Operator
- expressions: _col0 (type: bigint)
- outputColumnNames: _col0
- Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
- File Output Operator
- compressed: false
- GlobalTableId: 0
-#### A masked pattern was here ####
- NumFilesPerFileSink: 1
- Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
-#### A masked pattern was here ####
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- properties:
- columns _col0
- columns.types bigint
- escape.delim \
- hive.serialization.extend.nesting.levels true
- serialization.format 1
- serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- TotalFiles: 1
- GatherStats: false
- MultiFileSpray: false
+-- make sure the partition predicate still works.
+-- select count(*) from alter_coltype where dt = '100x' or dt = '10';
+-- explain extended select count(*) from alter_coltype where dt = '100x' or dt = '10';
- Stage: Stage-0
- Fetch Operator
- limit: -1
+-- desc alter_coltype;
+-- desc alter_coltype partition (dt='100x', ts='6:30pm');
+-- desc alter_coltype partition (dt='100x', ts=3.0);
+-- desc alter_coltype partition (dt=10, ts=3.0);
-PREHOOK: query: -- query where multiple partition values (of different datatypes) are being selected
-select key, value, dt, ts from alter_coltype where dt is not null
-PREHOOK: type: QUERY
+drop table alter_coltype
+PREHOOK: type: DROPTABLE
PREHOOK: Input: default@alter_coltype
-PREHOOK: Input: default@alter_coltype@dt=10/ts=3.0
-PREHOOK: Input: default@alter_coltype@dt=100x/ts=3.0
-PREHOOK: Input: default@alter_coltype@dt=100x/ts=6%3A30pm
-#### A masked pattern was here ####
-POSTHOOK: query: -- query where multiple partition values (of different datatypes) are being selected
-select key, value, dt, ts from alter_coltype where dt is not null
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@alter_coltype
-POSTHOOK: Input: default@alter_coltype@dt=10/ts=3.0
-POSTHOOK: Input: default@alter_coltype@dt=100x/ts=3.0
-POSTHOOK: Input: default@alter_coltype@dt=100x/ts=6%3A30pm
-#### A masked pattern was here ####
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=10,ts=3.0).key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=10,ts=3.0).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=3.0).key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=3.0).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
-238 val_238 10 3.0
- 10 3.0
-311 val_311 10 3.0
- val_27 10 3.0
- val_165 10 3.0
- val_409 10 3.0
-255 val_255 10 3.0
-278 val_278 10 3.0
-98 val_98 10 3.0
- val_484 10 3.0
- val_265 10 3.0
- val_193 10 3.0
-401 val_401 10 3.0
-150 val_150 10 3.0
-273 val_273 10 3.0
-224 10 3.0
-369 10 3.0
-66 val_66 10 3.0
-128 10 3.0
-213 val_213 10 3.0
-146 val_146 10 3.0
-406 val_406 10 3.0
- 10 3.0
- 10 3.0
- 10 3.0
-238 val_238 100x 3.0
- 100x 3.0
-311 val_311 100x 3.0
- val_27 100x 3.0
- val_165 100x 3.0
- val_409 100x 3.0
-255 val_255 100x 3.0
-278 val_278 100x 3.0
-98 val_98 100x 3.0
- val_484 100x 3.0
- val_265 100x 3.0
- val_193 100x 3.0
-401 val_401 100x 3.0
-150 val_150 100x 3.0
-273 val_273 100x 3.0
-224 100x 3.0
-369 100x 3.0
-66 val_66 100x 3.0
-128 100x 3.0
-213 val_213 100x 3.0
-146 val_146 100x 3.0
-406 val_406 100x 3.0
- 100x 3.0
- 100x 3.0
- 100x 3.0
-238 val_238 100x 6:30pm
- 100x 6:30pm
-311 val_311 100x 6:30pm
- val_27 100x 6:30pm
- val_165 100x 6:30pm
- val_409 100x 6:30pm
-255 val_255 100x 6:30pm
-278 val_278 100x 6:30pm
-98 val_98 100x 6:30pm
- val_484 100x 6:30pm
- val_265 100x 6:30pm
- val_193 100x 6:30pm
-401 val_401 100x 6:30pm
-150 val_150 100x 6:30pm
-273 val_273 100x 6:30pm
-224 100x 6:30pm
-369 100x 6:30pm
-66 val_66 100x 6:30pm
-128 100x 6:30pm
-213 val_213 100x 6:30pm
-146 val_146 100x 6:30pm
-406 val_406 100x 6:30pm
- 100x 6:30pm
- 100x 6:30pm
- 100x 6:30pm
-PREHOOK: query: explain extended select key, value, dt, ts from alter_coltype where dt is not null
-PREHOOK: type: QUERY
-POSTHOOK: query: explain extended select key, value, dt, ts from alter_coltype where dt is not null
-POSTHOOK: type: QUERY
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=10,ts=3.0).key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=10,ts=3.0).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=3.0).key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=3.0).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
-ABSTRACT SYNTAX TREE:
-
-TOK_QUERY
- TOK_FROM
- TOK_TABREF
- TOK_TABNAME
- alter_coltype
- TOK_INSERT
- TOK_DESTINATION
- TOK_DIR
- TOK_TMP_FILE
- TOK_SELECT
- TOK_SELEXPR
- TOK_TABLE_OR_COL
- key
- TOK_SELEXPR
- TOK_TABLE_OR_COL
- value
- TOK_SELEXPR
- TOK_TABLE_OR_COL
- dt
- TOK_SELEXPR
- TOK_TABLE_OR_COL
- ts
- TOK_WHERE
- TOK_FUNCTION
- TOK_ISNOTNULL
- TOK_TABLE_OR_COL
- dt
+PREHOOK: Output: default@alter_coltype
+POSTHOOK: query: -- alter partition key column data type for dt column.
+-- alter table alter_coltype partition column (dt int);
+-- load a new partition using new data type.
+-- insert overwrite table alter_coltype partition(dt=10, ts='3.0') select * from src1;
-STAGE DEPENDENCIES:
- Stage-1 is a root stage
- Stage-0 is a root stage
+-- make sure the partition predicate still works.
+-- select count(*) from alter_coltype where dt = '100x';
+-- explain extended select count(*) from alter_coltype where dt = '100x';
-STAGE PLANS:
- Stage: Stage-1
- Map Reduce
- Map Operator Tree:
- TableScan
- alias: alter_coltype
- Statistics: Num rows: 75 Data size: 573 Basic stats: COMPLETE Column stats: NONE
- GatherStats: false
- Select Operator
- expressions: key (type: string), value (type: string), dt (type: string), ts (type: string)
- outputColumnNames: _col0, _col1, _col2, _col3
- Statistics: Num rows: 75 Data size: 573 Basic stats: COMPLETE Column stats: NONE
- File Output Operator
- compressed: false
- GlobalTableId: 0
-#### A masked pattern was here ####
- NumFilesPerFileSink: 1
- Statistics: Num rows: 75 Data size: 573 Basic stats: COMPLETE Column stats: NONE
-#### A masked pattern was here ####
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- properties:
- columns _col0,_col1,_col2,_col3
- columns.types string:string:string:string
- escape.delim \
- hive.serialization.extend.nesting.levels true
- serialization.format 1
- serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- TotalFiles: 1
- GatherStats: false
- MultiFileSpray: false
- Path -> Alias:
-#### A masked pattern was here ####
- Path -> Partition:
-#### A masked pattern was here ####
- Partition
- base file name: ts=3.0
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- partition values:
- dt 10
- ts 3.0
- properties:
- COLUMN_STATS_ACCURATE true
- bucket_count -1
- columns key,value
- columns.comments
- columns.types string:string
-#### A masked pattern was here ####
- name default.alter_coltype
- numFiles 1
- numRows 25
- partition_columns dt/ts
- rawDataSize 191
- serialization.ddl struct alter_coltype { string key, string value}
- serialization.format 1
- serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- totalSize 216
-#### A masked pattern was here ####
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- properties:
- bucket_count -1
- columns key,value
- columns.comments
- columns.types string:string
-#### A masked pattern was here ####
- name default.alter_coltype
- partition_columns dt/ts
- serialization.ddl struct alter_coltype { string key, string value}
- serialization.format 1
- serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.alter_coltype
- name: default.alter_coltype
-#### A masked pattern was here ####
- Partition
- base file name: ts=3.0
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- partition values:
- dt 100x
- ts 3.0
- properties:
- COLUMN_STATS_ACCURATE true
- bucket_count -1
- columns key,value
- columns.comments
- columns.types string:string
-#### A masked pattern was here ####
- name default.alter_coltype
- numFiles 1
- numRows 25
- partition_columns dt/ts
- rawDataSize 191
- serialization.ddl struct alter_coltype { string key, string value}
- serialization.format 1
- serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- totalSize 216
-#### A masked pattern was here ####
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- properties:
- bucket_count -1
- columns key,value
- columns.comments
- columns.types string:string
-#### A masked pattern was here ####
- name default.alter_coltype
- partition_columns dt/ts
- serialization.ddl struct alter_coltype { string key, string value}
- serialization.format 1
- serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.alter_coltype
- name: default.alter_coltype
-#### A masked pattern was here ####
- Partition
- base file name: ts=6%3A30pm
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- partition values:
- dt 100x
- ts 6:30pm
- properties:
- COLUMN_STATS_ACCURATE true
- bucket_count -1
- columns key,value
- columns.comments
- columns.types string:string
-#### A masked pattern was here ####
- name default.alter_coltype
- numFiles 1
- numRows 25
- partition_columns dt/ts
- rawDataSize 191
- serialization.ddl struct alter_coltype { string key, string value}
- serialization.format 1
- serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- totalSize 216
-#### A masked pattern was here ####
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- properties:
- bucket_count -1
- columns key,value
- columns.comments
- columns.types string:string
-#### A masked pattern was here ####
- name default.alter_coltype
- partition_columns dt/ts
- serialization.ddl struct alter_coltype { string key, string value}
- serialization.format 1
- serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.alter_coltype
- name: default.alter_coltype
- Truncated Path -> Alias:
- /alter_coltype/dt=10/ts=3.0 [alter_coltype]
- /alter_coltype/dt=100x/ts=3.0 [alter_coltype]
- /alter_coltype/dt=100x/ts=6%3A30pm [alter_coltype]
+-- select count(*) from alter_coltype where dt = '100';
- Stage: Stage-0
- Fetch Operator
- limit: -1
+-- alter partition key column data type for ts column.
+-- alter table alter_coltype partition column (ts double);
-PREHOOK: query: select count(*) from alter_coltype where ts = 3.0
-PREHOOK: type: QUERY
-PREHOOK: Input: default@alter_coltype
-PREHOOK: Input: default@alter_coltype@dt=10/ts=3.0
-PREHOOK: Input: default@alter_coltype@dt=100x/ts=3.0
-PREHOOK: Input: default@alter_coltype@dt=100x/ts=6%3A30pm
-#### A masked pattern was here ####
-POSTHOOK: query: select count(*) from alter_coltype where ts = 3.0
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@alter_coltype
-POSTHOOK: Input: default@alter_coltype@dt=10/ts=3.0
-POSTHOOK: Input: default@alter_coltype@dt=100x/ts=3.0
-POSTHOOK: Input: default@alter_coltype@dt=100x/ts=6%3A30pm
-#### A masked pattern was here ####
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=10,ts=3.0).key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=10,ts=3.0).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=3.0).key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=3.0).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
-50
-PREHOOK: query: -- make sure the partition predicate still works.
-select count(*) from alter_coltype where dt = '100x' or dt = '10'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@alter_coltype
-PREHOOK: Input: default@alter_coltype@dt=10/ts=3.0
-PREHOOK: Input: default@alter_coltype@dt=100x/ts=3.0
-PREHOOK: Input: default@alter_coltype@dt=100x/ts=6%3A30pm
-#### A masked pattern was here ####
-POSTHOOK: query: -- make sure the partition predicate still works.
-select count(*) from alter_coltype where dt = '100x' or dt = '10'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@alter_coltype
-POSTHOOK: Input: default@alter_coltype@dt=10/ts=3.0
-POSTHOOK: Input: default@alter_coltype@dt=100x/ts=3.0
-POSTHOOK: Input: default@alter_coltype@dt=100x/ts=6%3A30pm
-#### A masked pattern was here ####
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=10,ts=3.0).key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=10,ts=3.0).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=3.0).key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=3.0).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
-75
-PREHOOK: query: explain extended select count(*) from alter_coltype where dt = '100x' or dt = '10'
-PREHOOK: type: QUERY
-POSTHOOK: query: explain extended select count(*) from alter_coltype where dt = '100x' or dt = '10'
-POSTHOOK: type: QUERY
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=10,ts=3.0).key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=10,ts=3.0).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=3.0).key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=3.0).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
-ABSTRACT SYNTAX TREE:
-
-TOK_QUERY
- TOK_FROM
- TOK_TABREF
- TOK_TABNAME
- alter_coltype
- TOK_INSERT
- TOK_DESTINATION
- TOK_DIR
- TOK_TMP_FILE
- TOK_SELECT
- TOK_SELEXPR
- TOK_FUNCTIONSTAR
- count
- TOK_WHERE
- or
- =
- TOK_TABLE_OR_COL
- dt
- '100x'
- =
- TOK_TABLE_OR_COL
- dt
- '10'
+-- alter table alter_coltype partition column (dt string);
+-- load a new partition using new data type.
+-- insert overwrite table alter_coltype partition(dt='100x', ts=3.0) select * from src1;
-STAGE DEPENDENCIES:
- Stage-1 is a root stage
- Stage-0 is a root stage
+-- validate partition key column predicate can still work.
+-- select count(*) from alter_coltype where ts = '6:30pm';
+-- explain extended select count(*) from alter_coltype where ts = '6:30pm';
-STAGE PLANS:
- Stage: Stage-1
- Map Reduce
- Map Operator Tree:
- TableScan
- alias: alter_coltype
- Statistics: Num rows: 75 Data size: 573 Basic stats: COMPLETE Column stats: COMPLETE
- GatherStats: false
- Select Operator
- Statistics: Num rows: 75 Data size: 573 Basic stats: COMPLETE Column stats: COMPLETE
- Group By Operator
- aggregations: count()
- mode: hash
- outputColumnNames: _col0
- Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
- Reduce Output Operator
- sort order:
- Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
- tag: -1
- value expressions: _col0 (type: bigint)
- Path -> Alias:
-#### A masked pattern was here ####
- Path -> Partition:
-#### A masked pattern was here ####
- Partition
- base file name: ts=3.0
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- partition values:
- dt 10
- ts 3.0
- properties:
- COLUMN_STATS_ACCURATE true
- bucket_count -1
- columns key,value
- columns.comments
- columns.types string:string
-#### A masked pattern was here ####
- name default.alter_coltype
- numFiles 1
- numRows 25
- partition_columns dt/ts
- rawDataSize 191
- serialization.ddl struct alter_coltype { string key, string value}
- serialization.format 1
- serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- totalSize 216
-#### A masked pattern was here ####
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- properties:
- bucket_count -1
- columns key,value
- columns.comments
- columns.types string:string
-#### A masked pattern was here ####
- name default.alter_coltype
- partition_columns dt/ts
- serialization.ddl struct alter_coltype { string key, string value}
- serialization.format 1
- serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.alter_coltype
- name: default.alter_coltype
-#### A masked pattern was here ####
- Partition
- base file name: ts=3.0
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- partition values:
- dt 100x
- ts 3.0
- properties:
- COLUMN_STATS_ACCURATE true
- bucket_count -1
- columns key,value
- columns.comments
- columns.types string:string
-#### A masked pattern was here ####
- name default.alter_coltype
- numFiles 1
- numRows 25
- partition_columns dt/ts
- rawDataSize 191
- serialization.ddl struct alter_coltype { string key, string value}
- serialization.format 1
- serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- totalSize 216
-#### A masked pattern was here ####
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- properties:
- bucket_count -1
- columns key,value
- columns.comments
- columns.types string:string
-#### A masked pattern was here ####
- name default.alter_coltype
- partition_columns dt/ts
- serialization.ddl struct alter_coltype { string key, string value}
- serialization.format 1
- serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.alter_coltype
- name: default.alter_coltype
-#### A masked pattern was here ####
- Partition
- base file name: ts=6%3A30pm
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- partition values:
- dt 100x
- ts 6:30pm
- properties:
- COLUMN_STATS_ACCURATE true
- bucket_count -1
- columns key,value
- columns.comments
- columns.types string:string
-#### A masked pattern was here ####
- name default.alter_coltype
- numFiles 1
- numRows 25
- partition_columns dt/ts
- rawDataSize 191
- serialization.ddl struct alter_coltype { string key, string value}
- serialization.format 1
- serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- totalSize 216
-#### A masked pattern was here ####
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- properties:
- bucket_count -1
- columns key,value
- columns.comments
- columns.types string:string
-#### A masked pattern was here ####
- name default.alter_coltype
- partition_columns dt/ts
- serialization.ddl struct alter_coltype { string key, string value}
- serialization.format 1
- serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.alter_coltype
- name: default.alter_coltype
- Truncated Path -> Alias:
- /alter_coltype/dt=10/ts=3.0 [alter_coltype]
- /alter_coltype/dt=100x/ts=3.0 [alter_coltype]
- /alter_coltype/dt=100x/ts=6%3A30pm [alter_coltype]
- Needs Tagging: false
- Reduce Operator Tree:
- Group By Operator
- aggregations: count(VALUE._col0)
- mode: mergepartial
- outputColumnNames: _col0
- Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
- Select Operator
- expressions: _col0 (type: bigint)
- outputColumnNames: _col0
- Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
- File Output Operator
- compressed: false
- GlobalTableId: 0
-#### A masked pattern was here ####
- NumFilesPerFileSink: 1
- Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
-#### A masked pattern was here ####
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- properties:
- columns _col0
- columns.types bigint
- escape.delim \
- hive.serialization.extend.nesting.levels true
- serialization.format 1
- serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- TotalFiles: 1
- GatherStats: false
- MultiFileSpray: false
+-- validate partition key column predicate on two different partition column data type
+-- can still work.
+-- select count(*) from alter_coltype where ts = 3.0 and dt=10;
+-- explain extended select count(*) from alter_coltype where ts = 3.0 and dt=10;
- Stage: Stage-0
- Fetch Operator
- limit: -1
+-- query where multiple partition values (of different datatypes) are being selected
+-- select key, value, dt, ts from alter_coltype where dt is not null;
+-- explain extended select key, value, dt, ts from alter_coltype where dt is not null;
-PREHOOK: query: desc alter_coltype
-PREHOOK: type: DESCTABLE
-POSTHOOK: query: desc alter_coltype
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=10,ts=3.0).key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=10,ts=3.0).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=3.0).key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=3.0).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
-key string
-value string
-dt string
-ts double
-
-# Partition Information
-# col_name data_type comment
-
-dt string
-ts double
-PREHOOK: query: desc alter_coltype partition (dt='100x', ts='6:30pm')
-PREHOOK: type: DESCTABLE
-POSTHOOK: query: desc alter_coltype partition (dt='100x', ts='6:30pm')
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=10,ts=3.0).key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=10,ts=3.0).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=3.0).key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=3.0).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
-key string
-value string
-dt string
-ts double
-
-# Partition Information
-# col_name data_type comment
-
-dt string
-ts double
-PREHOOK: query: desc alter_coltype partition (dt='100x', ts=3.0)
-PREHOOK: type: DESCTABLE
-POSTHOOK: query: desc alter_coltype partition (dt='100x', ts=3.0)
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=10,ts=3.0).key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=10,ts=3.0).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=3.0).key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=3.0).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
-key string
-value string
-dt string
-ts double
-
-# Partition Information
-# col_name data_type comment
-
-dt string
-ts double
-PREHOOK: query: desc alter_coltype partition (dt=10, ts=3.0)
-PREHOOK: type: DESCTABLE
-POSTHOOK: query: desc alter_coltype partition (dt=10, ts=3.0)
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=10,ts=3.0).key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=10,ts=3.0).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=3.0).key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=3.0).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
-key string
-value string
-dt string
-ts double
-
-# Partition Information
-# col_name data_type comment
-
-dt string
-ts double
-PREHOOK: query: drop table alter_coltype
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@alter_coltype
-PREHOOK: Output: default@alter_coltype
-POSTHOOK: query: drop table alter_coltype
+-- select count(*) from alter_coltype where ts = 3.0;
+
+-- make sure the partition predicate still works.
+-- select count(*) from alter_coltype where dt = '100x' or dt = '10';
+-- explain extended select count(*) from alter_coltype where dt = '100x' or dt = '10';
+
+-- desc alter_coltype;
+-- desc alter_coltype partition (dt='100x', ts='6:30pm');
+-- desc alter_coltype partition (dt='100x', ts=3.0);
+-- desc alter_coltype partition (dt=10, ts=3.0);
+
+drop table alter_coltype
POSTHOOK: type: DROPTABLE
POSTHOOK: Input: default@alter_coltype
POSTHOOK: Output: default@alter_coltype
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=10,ts=3.0).key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=10,ts=3.0).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=3.0).key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=3.0).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
Index: ql/src/test/results/clientpositive/auto_sortmerge_join_4.q.out
===================================================================
--- ql/src/test/results/clientpositive/auto_sortmerge_join_4.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/auto_sortmerge_join_4.q.out (working copy)
@@ -188,6 +188,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -208,6 +209,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -357,6 +359,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -377,6 +380,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -533,6 +537,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
@@ -553,6 +558,7 @@
#### A masked pattern was here ####
name default.bucket_small
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -577,6 +583,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
@@ -597,6 +604,7 @@
#### A masked pattern was here ####
name default.bucket_small
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -631,6 +639,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -651,6 +660,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -676,6 +686,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
@@ -696,6 +707,7 @@
#### A masked pattern was here ####
name default.bucket_small
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -721,6 +733,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
@@ -741,6 +754,7 @@
#### A masked pattern was here ####
name default.bucket_small
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -830,6 +844,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -850,6 +865,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -884,6 +900,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -904,6 +921,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -929,6 +947,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
@@ -949,6 +968,7 @@
#### A masked pattern was here ####
name default.bucket_small
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -974,6 +994,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
@@ -994,6 +1015,7 @@
#### A masked pattern was here ####
name default.bucket_small
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1082,6 +1104,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -1102,6 +1125,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/load_dyn_part8.q.out
===================================================================
--- ql/src/test/results/clientpositive/load_dyn_part8.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/load_dyn_part8.q.out (working copy)
@@ -180,6 +180,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -198,6 +199,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -224,6 +226,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -242,6 +245,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -268,6 +272,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -286,6 +291,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -312,6 +318,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -330,6 +337,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -364,6 +372,7 @@
#### A masked pattern was here ####
name default.nzhang_part8
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct nzhang_part8 { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -393,6 +402,7 @@
#### A masked pattern was here ####
name default.nzhang_part8
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct nzhang_part8 { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -464,6 +474,7 @@
#### A masked pattern was here ####
name default.nzhang_part8
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct nzhang_part8 { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -493,6 +504,7 @@
#### A masked pattern was here ####
name default.nzhang_part8
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct nzhang_part8 { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/groupby_map_ppr.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby_map_ppr.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/groupby_map_ppr.q.out (working copy)
@@ -142,6 +142,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -160,6 +161,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -186,6 +188,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -204,6 +207,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/groupby_sort_6.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby_sort_6.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/groupby_sort_6.q.out (working copy)
@@ -406,6 +406,7 @@
numFiles 1
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct t1 { string key, string val}
serialization.format 1
@@ -424,6 +425,7 @@
#### A masked pattern was here ####
name default.t1
partition_columns ds
+ partition_columns.types string
serialization.ddl struct t1 { string key, string val}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/push_or.q.out
===================================================================
--- ql/src/test/results/clientpositive/push_or.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/push_or.q.out (working copy)
@@ -126,6 +126,7 @@
numFiles 1
numRows 20
partition_columns ds
+ partition_columns.types string
rawDataSize 160
serialization.ddl struct push_or { i32 key, string value}
serialization.format 1
@@ -144,6 +145,7 @@
#### A masked pattern was here ####
name default.push_or
partition_columns ds
+ partition_columns.types string
serialization.ddl struct push_or { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -169,6 +171,7 @@
numFiles 1
numRows 20
partition_columns ds
+ partition_columns.types string
rawDataSize 160
serialization.ddl struct push_or { i32 key, string value}
serialization.format 1
@@ -187,6 +190,7 @@
#### A masked pattern was here ####
name default.push_or
partition_columns ds
+ partition_columns.types string
serialization.ddl struct push_or { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/bucketcontext_7.q.out
===================================================================
--- ql/src/test/results/clientpositive/bucketcontext_7.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/bucketcontext_7.q.out (working copy)
@@ -215,6 +215,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
@@ -235,6 +236,7 @@
#### A masked pattern was here ####
name default.bucket_small
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -259,6 +261,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
@@ -279,6 +282,7 @@
#### A masked pattern was here ####
name default.bucket_small
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -321,6 +325,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -341,6 +346,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -367,6 +373,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -387,6 +394,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -547,6 +555,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -567,6 +576,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -593,6 +603,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -613,6 +624,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/stats13.q.out
===================================================================
--- ql/src/test/results/clientpositive/stats13.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/stats13.q.out (working copy)
@@ -96,6 +96,7 @@
numFiles 1
numRows -1
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize -1
serialization.ddl struct analyze_srcpart { string key, string value}
serialization.format 1
@@ -114,6 +115,7 @@
#### A masked pattern was here ####
name default.analyze_srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct analyze_srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/auto_sortmerge_join_11.q.out
===================================================================
--- ql/src/test/results/clientpositive/auto_sortmerge_join_11.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/auto_sortmerge_join_11.q.out (working copy)
@@ -195,6 +195,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
@@ -214,6 +215,7 @@
#### A masked pattern was here ####
name default.bucket_small
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -249,6 +251,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -268,6 +271,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -294,6 +298,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -313,6 +318,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -339,6 +345,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
@@ -358,6 +365,7 @@
#### A masked pattern was here ####
name default.bucket_small
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -526,6 +534,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
@@ -545,6 +554,7 @@
#### A masked pattern was here ####
name default.bucket_small
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -580,6 +590,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -599,6 +610,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -625,6 +637,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -644,6 +657,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -670,6 +684,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
@@ -689,6 +704,7 @@
#### A masked pattern was here ####
name default.bucket_small
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -856,6 +872,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
@@ -875,6 +892,7 @@
#### A masked pattern was here ####
name default.bucket_small
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -917,6 +935,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -936,6 +955,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -962,6 +982,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -981,6 +1002,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/combine2_hadoop20.q.out
===================================================================
--- ql/src/test/results/clientpositive/combine2_hadoop20.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/combine2_hadoop20.q.out (working copy)
@@ -263,6 +263,7 @@
numFiles 1
numRows 1
partition_columns value
+ partition_columns.types string
rawDataSize 2
serialization.ddl struct combine2 { string key}
serialization.format 1
@@ -281,6 +282,7 @@
#### A masked pattern was here ####
name default.combine2
partition_columns value
+ partition_columns.types string
serialization.ddl struct combine2 { string key}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -306,6 +308,7 @@
numFiles 1
numRows 3
partition_columns value
+ partition_columns.types string
rawDataSize 3
serialization.ddl struct combine2 { string key}
serialization.format 1
@@ -324,6 +327,7 @@
#### A masked pattern was here ####
name default.combine2
partition_columns value
+ partition_columns.types string
serialization.ddl struct combine2 { string key}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -349,6 +353,7 @@
numFiles 1
numRows 1
partition_columns value
+ partition_columns.types string
rawDataSize 1
serialization.ddl struct combine2 { string key}
serialization.format 1
@@ -367,6 +372,7 @@
#### A masked pattern was here ####
name default.combine2
partition_columns value
+ partition_columns.types string
serialization.ddl struct combine2 { string key}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -392,6 +398,7 @@
numFiles 1
numRows 1
partition_columns value
+ partition_columns.types string
rawDataSize 1
serialization.ddl struct combine2 { string key}
serialization.format 1
@@ -410,6 +417,7 @@
#### A masked pattern was here ####
name default.combine2
partition_columns value
+ partition_columns.types string
serialization.ddl struct combine2 { string key}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -435,6 +443,7 @@
numFiles 1
numRows 3
partition_columns value
+ partition_columns.types string
rawDataSize 3
serialization.ddl struct combine2 { string key}
serialization.format 1
@@ -453,6 +462,7 @@
#### A masked pattern was here ####
name default.combine2
partition_columns value
+ partition_columns.types string
serialization.ddl struct combine2 { string key}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -478,6 +488,7 @@
numFiles 1
numRows 1
partition_columns value
+ partition_columns.types string
rawDataSize 1
serialization.ddl struct combine2 { string key}
serialization.format 1
@@ -496,6 +507,7 @@
#### A masked pattern was here ####
name default.combine2
partition_columns value
+ partition_columns.types string
serialization.ddl struct combine2 { string key}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -521,6 +533,7 @@
numFiles 1
numRows 1
partition_columns value
+ partition_columns.types string
rawDataSize 1
serialization.ddl struct combine2 { string key}
serialization.format 1
@@ -539,6 +552,7 @@
#### A masked pattern was here ####
name default.combine2
partition_columns value
+ partition_columns.types string
serialization.ddl struct combine2 { string key}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -564,6 +578,7 @@
numFiles 1
numRows 1
partition_columns value
+ partition_columns.types string
rawDataSize 2
serialization.ddl struct combine2 { string key}
serialization.format 1
@@ -582,6 +597,7 @@
#### A masked pattern was here ####
name default.combine2
partition_columns value
+ partition_columns.types string
serialization.ddl struct combine2 { string key}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/bucketcontext_2.q.out
===================================================================
--- ql/src/test/results/clientpositive/bucketcontext_2.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/bucketcontext_2.q.out (working copy)
@@ -182,6 +182,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
@@ -202,6 +203,7 @@
#### A masked pattern was here ####
name default.bucket_small
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -244,6 +246,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -264,6 +267,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -290,6 +294,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -310,6 +315,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -468,6 +474,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -488,6 +495,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -514,6 +522,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -534,6 +543,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/groupby_map_ppr_multi_distinct.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby_map_ppr_multi_distinct.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/groupby_map_ppr_multi_distinct.q.out (working copy)
@@ -159,6 +159,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -177,6 +178,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -203,6 +205,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -221,6 +224,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/filter_join_breaktask.q.out
===================================================================
--- ql/src/test/results/clientpositive/filter_join_breaktask.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/filter_join_breaktask.q.out (working copy)
@@ -191,6 +191,7 @@
numFiles 1
numRows 25
partition_columns ds
+ partition_columns.types string
rawDataSize 211
serialization.ddl struct filter_join_breaktask { i32 key, string value}
serialization.format 1
@@ -209,6 +210,7 @@
#### A masked pattern was here ####
name default.filter_join_breaktask
partition_columns ds
+ partition_columns.types string
serialization.ddl struct filter_join_breaktask { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -314,6 +316,7 @@
numFiles 1
numRows 25
partition_columns ds
+ partition_columns.types string
rawDataSize 211
serialization.ddl struct filter_join_breaktask { i32 key, string value}
serialization.format 1
@@ -332,6 +335,7 @@
#### A masked pattern was here ####
name default.filter_join_breaktask
partition_columns ds
+ partition_columns.types string
serialization.ddl struct filter_join_breaktask { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/sort_merge_join_desc_5.q.out
===================================================================
--- ql/src/test/results/clientpositive/sort_merge_join_desc_5.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/sort_merge_join_desc_5.q.out (working copy)
@@ -174,6 +174,7 @@
numFiles 1
numRows 500
partition_columns part
+ partition_columns.types string
rawDataSize 5312
serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
serialization.format 1
@@ -194,6 +195,7 @@
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part_1
partition_columns part
+ partition_columns.types string
serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/input_part9.q.out
===================================================================
--- ql/src/test/results/clientpositive/input_part9.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/input_part9.q.out (working copy)
@@ -92,6 +92,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -110,6 +111,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -136,6 +138,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -154,6 +157,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/bucketmapjoin7.q.out
===================================================================
--- ql/src/test/results/clientpositive/bucketmapjoin7.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/bucketmapjoin7.q.out (working copy)
@@ -202,6 +202,7 @@
numFiles 2
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
@@ -221,6 +222,7 @@
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part_2
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -264,6 +266,7 @@
numFiles 2
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
serialization.format 1
@@ -283,6 +286,7 @@
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part_1
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/bucketmapjoin11.q.out
===================================================================
--- ql/src/test/results/clientpositive/bucketmapjoin11.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/bucketmapjoin11.q.out (working copy)
@@ -259,6 +259,7 @@
numFiles 4
numRows 0
partition_columns part
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
@@ -278,6 +279,7 @@
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part_2
partition_columns part
+ partition_columns.types string
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -302,6 +304,7 @@
numFiles 2
numRows 0
partition_columns part
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
@@ -321,6 +324,7 @@
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part_2
partition_columns part
+ partition_columns.types string
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -363,6 +367,7 @@
numFiles 2
numRows 0
partition_columns part
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
serialization.format 1
@@ -382,6 +387,7 @@
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part_1
partition_columns part
+ partition_columns.types string
serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -408,6 +414,7 @@
numFiles 4
numRows 0
partition_columns part
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
serialization.format 1
@@ -427,6 +434,7 @@
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part_1
partition_columns part
+ partition_columns.types string
serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -628,6 +636,7 @@
numFiles 4
numRows 0
partition_columns part
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
@@ -647,6 +656,7 @@
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part_2
partition_columns part
+ partition_columns.types string
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -671,6 +681,7 @@
numFiles 2
numRows 0
partition_columns part
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
@@ -690,6 +701,7 @@
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part_2
partition_columns part
+ partition_columns.types string
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -732,6 +744,7 @@
numFiles 2
numRows 0
partition_columns part
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
serialization.format 1
@@ -751,6 +764,7 @@
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part_1
partition_columns part
+ partition_columns.types string
serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -777,6 +791,7 @@
numFiles 4
numRows 0
partition_columns part
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
serialization.format 1
@@ -796,6 +811,7 @@
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part_1
partition_columns part
+ partition_columns.types string
serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/join26.q.out
===================================================================
--- ql/src/test/results/clientpositive/join26.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/join26.q.out (working copy)
@@ -201,6 +201,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -219,6 +220,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/bucketmapjoin_negative.q.out
===================================================================
--- ql/src/test/results/clientpositive/bucketmapjoin_negative.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/bucketmapjoin_negative.q.out (working copy)
@@ -213,6 +213,7 @@
numFiles 3
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
serialization.format 1
@@ -232,6 +233,7 @@
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part
partition_columns ds
+ partition_columns.types string
serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/bucketmapjoin2.q.out
===================================================================
--- ql/src/test/results/clientpositive/bucketmapjoin2.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/bucketmapjoin2.q.out (working copy)
@@ -237,6 +237,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
@@ -256,6 +257,7 @@
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part_2
partition_columns ds
+ partition_columns.types string
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -298,6 +300,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
serialization.format 1
@@ -317,6 +320,7 @@
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part
partition_columns ds
+ partition_columns.types string
serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -824,6 +828,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
serialization.format 1
@@ -843,6 +848,7 @@
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part
partition_columns ds
+ partition_columns.types string
serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -885,6 +891,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
@@ -904,6 +911,7 @@
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part_2
partition_columns ds
+ partition_columns.types string
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1602,6 +1610,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
@@ -1621,6 +1630,7 @@
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part_2
partition_columns ds
+ partition_columns.types string
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1645,6 +1655,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
@@ -1664,6 +1675,7 @@
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part_2
partition_columns ds
+ partition_columns.types string
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1706,6 +1718,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
serialization.format 1
@@ -1725,6 +1738,7 @@
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part
partition_columns ds
+ partition_columns.types string
serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/join_map_ppr.q.out
===================================================================
--- ql/src/test/results/clientpositive/join_map_ppr.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/join_map_ppr.q.out (working copy)
@@ -203,6 +203,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -221,6 +222,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -811,6 +813,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -829,6 +832,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/join9.q.out
===================================================================
--- ql/src/test/results/clientpositive/join9.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/join9.q.out (working copy)
@@ -164,6 +164,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -182,6 +183,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/smb_mapjoin_11.q.out
===================================================================
--- ql/src/test/results/clientpositive/smb_mapjoin_11.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/smb_mapjoin_11.q.out (working copy)
@@ -170,6 +170,7 @@
#### A masked pattern was here ####
name default.test_table3
partition_columns ds
+ partition_columns.types string
serialization.ddl struct test_table3 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -201,6 +202,7 @@
numFiles 16
numRows 500
partition_columns ds
+ partition_columns.types string
rawDataSize 5312
serialization.ddl struct test_table1 { i32 key, string value}
serialization.format 1
@@ -221,6 +223,7 @@
#### A masked pattern was here ####
name default.test_table1
partition_columns ds
+ partition_columns.types string
serialization.ddl struct test_table1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -250,6 +253,7 @@
#### A masked pattern was here ####
name default.test_table3
partition_columns ds
+ partition_columns.types string
serialization.ddl struct test_table3 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/ppr_allchildsarenull.q.out
===================================================================
--- ql/src/test/results/clientpositive/ppr_allchildsarenull.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/ppr_allchildsarenull.q.out (working copy)
@@ -123,6 +123,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -141,6 +142,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -167,6 +169,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -185,6 +188,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -349,6 +353,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -367,6 +372,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -393,6 +399,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -411,6 +418,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -437,6 +445,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -455,6 +464,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -481,6 +491,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -499,6 +510,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/auto_sortmerge_join_1.q.out
===================================================================
--- ql/src/test/results/clientpositive/auto_sortmerge_join_1.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/auto_sortmerge_join_1.q.out (working copy)
@@ -192,6 +192,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -212,6 +213,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -238,6 +240,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -258,6 +261,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -408,6 +412,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -428,6 +433,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -454,6 +460,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -474,6 +481,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -631,6 +639,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
@@ -651,6 +660,7 @@
#### A masked pattern was here ####
name default.bucket_small
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -685,6 +695,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -705,6 +716,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -731,6 +743,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -751,6 +764,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -776,6 +790,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
@@ -796,6 +811,7 @@
#### A masked pattern was here ####
name default.bucket_small
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -886,6 +902,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -906,6 +923,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -931,6 +949,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -951,6 +970,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -985,6 +1005,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -1005,6 +1026,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1031,6 +1053,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -1051,6 +1074,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1076,6 +1100,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
@@ -1096,6 +1121,7 @@
#### A masked pattern was here ####
name default.bucket_small
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1183,6 +1209,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -1203,6 +1230,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1229,6 +1257,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -1249,6 +1278,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/sample1.q.out
===================================================================
--- ql/src/test/results/clientpositive/sample1.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/sample1.q.out (working copy)
@@ -130,6 +130,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -148,6 +149,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/reduce_deduplicate.q.out
===================================================================
--- ql/src/test/results/clientpositive/reduce_deduplicate.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/reduce_deduplicate.q.out (working copy)
@@ -434,6 +434,7 @@
#### A masked pattern was here ####
name default.complex_tbl_1
partition_columns ds
+ partition_columns.types string
serialization.ddl struct complex_tbl_1 { string aid, string bid, i32 t, string ctime, i64 etime, string l, string et}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -462,6 +463,7 @@
#### A masked pattern was here ####
name default.complex_tbl_1
partition_columns ds
+ partition_columns.types string
serialization.ddl struct complex_tbl_1 { string aid, string bid, i32 t, string ctime, i64 etime, string l, string et}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/rand_partitionpruner3.q.out
===================================================================
--- ql/src/test/results/clientpositive/rand_partitionpruner3.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/rand_partitionpruner3.q.out (working copy)
@@ -119,6 +119,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -137,6 +138,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -279,6 +281,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -297,6 +300,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/bucketcontext_4.q.out
===================================================================
--- ql/src/test/results/clientpositive/bucketcontext_4.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/bucketcontext_4.q.out (working copy)
@@ -198,6 +198,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
@@ -218,6 +219,7 @@
#### A masked pattern was here ####
name default.bucket_small
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -242,6 +244,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
@@ -262,6 +265,7 @@
#### A masked pattern was here ####
name default.bucket_small
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -304,6 +308,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -324,6 +329,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -481,6 +487,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -501,6 +508,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/merge3.q.out
===================================================================
--- ql/src/test/results/clientpositive/merge3.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/merge3.q.out (working copy)
@@ -2484,6 +2484,7 @@
numFiles 1
numRows 1000
partition_columns ds
+ partition_columns.types string
rawDataSize 10624
serialization.ddl struct merge_src_part { string key, string value}
serialization.format 1
@@ -2502,6 +2503,7 @@
#### A masked pattern was here ####
name default.merge_src_part
partition_columns ds
+ partition_columns.types string
serialization.ddl struct merge_src_part { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -2527,6 +2529,7 @@
numFiles 1
numRows 1000
partition_columns ds
+ partition_columns.types string
rawDataSize 10624
serialization.ddl struct merge_src_part { string key, string value}
serialization.format 1
@@ -2545,6 +2548,7 @@
#### A masked pattern was here ####
name default.merge_src_part
partition_columns ds
+ partition_columns.types string
serialization.ddl struct merge_src_part { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -2577,6 +2581,7 @@
#### A masked pattern was here ####
name default.merge_src_part2
partition_columns ds
+ partition_columns.types string
serialization.ddl struct merge_src_part2 { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -2614,6 +2619,7 @@
#### A masked pattern was here ####
name default.merge_src_part2
partition_columns ds
+ partition_columns.types string
serialization.ddl struct merge_src_part2 { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -2646,6 +2652,7 @@
#### A masked pattern was here ####
name default.merge_src_part2
partition_columns ds
+ partition_columns.types string
serialization.ddl struct merge_src_part2 { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -2671,6 +2678,7 @@
#### A masked pattern was here ####
name default.merge_src_part2
partition_columns ds
+ partition_columns.types string
serialization.ddl struct merge_src_part2 { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -2687,6 +2695,7 @@
#### A masked pattern was here ####
name default.merge_src_part2
partition_columns ds
+ partition_columns.types string
serialization.ddl struct merge_src_part2 { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -2718,6 +2727,7 @@
#### A masked pattern was here ####
name default.merge_src_part2
partition_columns ds
+ partition_columns.types string
serialization.ddl struct merge_src_part2 { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -2743,6 +2753,7 @@
#### A masked pattern was here ####
name default.merge_src_part2
partition_columns ds
+ partition_columns.types string
serialization.ddl struct merge_src_part2 { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -2759,6 +2770,7 @@
#### A masked pattern was here ####
name default.merge_src_part2
partition_columns ds
+ partition_columns.types string
serialization.ddl struct merge_src_part2 { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -4979,6 +4991,7 @@
numFiles 1
numRows 1000
partition_columns ds
+ partition_columns.types string
rawDataSize 10624
serialization.ddl struct merge_src_part { string key, string value}
serialization.format 1
@@ -4997,6 +5010,7 @@
#### A masked pattern was here ####
name default.merge_src_part
partition_columns ds
+ partition_columns.types string
serialization.ddl struct merge_src_part { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -5022,6 +5036,7 @@
numFiles 1
numRows 1000
partition_columns ds
+ partition_columns.types string
rawDataSize 10624
serialization.ddl struct merge_src_part { string key, string value}
serialization.format 1
@@ -5040,6 +5055,7 @@
#### A masked pattern was here ####
name default.merge_src_part
partition_columns ds
+ partition_columns.types string
serialization.ddl struct merge_src_part { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -5076,6 +5092,7 @@
#### A masked pattern was here ####
name default.merge_src_part2
partition_columns ds
+ partition_columns.types string
serialization.ddl struct merge_src_part2 { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -5113,6 +5130,7 @@
#### A masked pattern was here ####
name default.merge_src_part2
partition_columns ds
+ partition_columns.types string
serialization.ddl struct merge_src_part2 { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -5145,6 +5163,7 @@
#### A masked pattern was here ####
name default.merge_src_part2
partition_columns ds
+ partition_columns.types string
serialization.ddl struct merge_src_part2 { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -5170,6 +5189,7 @@
#### A masked pattern was here ####
name default.merge_src_part2
partition_columns ds
+ partition_columns.types string
serialization.ddl struct merge_src_part2 { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -5186,6 +5206,7 @@
#### A masked pattern was here ####
name default.merge_src_part2
partition_columns ds
+ partition_columns.types string
serialization.ddl struct merge_src_part2 { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -5217,6 +5238,7 @@
#### A masked pattern was here ####
name default.merge_src_part2
partition_columns ds
+ partition_columns.types string
serialization.ddl struct merge_src_part2 { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -5242,6 +5264,7 @@
#### A masked pattern was here ####
name default.merge_src_part2
partition_columns ds
+ partition_columns.types string
serialization.ddl struct merge_src_part2 { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -5258,6 +5281,7 @@
#### A masked pattern was here ####
name default.merge_src_part2
partition_columns ds
+ partition_columns.types string
serialization.ddl struct merge_src_part2 { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/sort_merge_join_desc_7.q.out
===================================================================
--- ql/src/test/results/clientpositive/sort_merge_join_desc_7.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/sort_merge_join_desc_7.q.out (working copy)
@@ -244,6 +244,7 @@
numFiles 2
numRows 500
partition_columns part
+ partition_columns.types string
rawDataSize 5312
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
@@ -263,6 +264,7 @@
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part_2
partition_columns part
+ partition_columns.types string
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -287,6 +289,7 @@
numFiles 2
numRows 500
partition_columns part
+ partition_columns.types string
rawDataSize 5312
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
@@ -306,6 +309,7 @@
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part_2
partition_columns part
+ partition_columns.types string
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -341,6 +345,7 @@
numFiles 2
numRows 500
partition_columns part
+ partition_columns.types string
rawDataSize 5312
serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
serialization.format 1
@@ -360,6 +365,7 @@
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part_1
partition_columns part
+ partition_columns.types string
serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -386,6 +392,7 @@
numFiles 2
numRows 500
partition_columns part
+ partition_columns.types string
rawDataSize 5312
serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
serialization.format 1
@@ -405,6 +412,7 @@
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part_1
partition_columns part
+ partition_columns.types string
serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/bucketmapjoin9.q.out
===================================================================
--- ql/src/test/results/clientpositive/bucketmapjoin9.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/bucketmapjoin9.q.out (working copy)
@@ -190,6 +190,7 @@
numFiles 3
numRows 0
partition_columns part
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
@@ -209,6 +210,7 @@
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part_2
partition_columns part
+ partition_columns.types string
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -244,6 +246,7 @@
numFiles 2
numRows 0
partition_columns part
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
serialization.format 1
@@ -263,6 +266,7 @@
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part_1
partition_columns part
+ partition_columns.types string
serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -493,6 +497,7 @@
numFiles 2
numRows 0
partition_columns part
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
@@ -512,6 +517,7 @@
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part_2
partition_columns part
+ partition_columns.types string
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -547,6 +553,7 @@
numFiles 2
numRows 0
partition_columns part
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
serialization.format 1
@@ -566,6 +573,7 @@
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part_1
partition_columns part
+ partition_columns.types string
serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/bucketmapjoin13.q.out
===================================================================
--- ql/src/test/results/clientpositive/bucketmapjoin13.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/bucketmapjoin13.q.out (working copy)
@@ -194,6 +194,7 @@
numFiles 2
numRows 500
partition_columns part
+ partition_columns.types string
rawDataSize 5312
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
@@ -213,6 +214,7 @@
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part_2
partition_columns part
+ partition_columns.types string
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -248,6 +250,7 @@
numFiles 2
numRows 500
partition_columns part
+ partition_columns.types string
rawDataSize 5312
serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
serialization.format 1
@@ -267,6 +270,7 @@
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part_1
partition_columns part
+ partition_columns.types string
serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -293,6 +297,7 @@
numFiles 2
numRows 500
partition_columns part
+ partition_columns.types string
rawDataSize 5312
serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
serialization.format 1
@@ -312,6 +317,7 @@
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part_1
partition_columns part
+ partition_columns.types string
serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -510,6 +516,7 @@
numFiles 2
numRows 500
partition_columns part
+ partition_columns.types string
rawDataSize 5312
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
@@ -529,6 +536,7 @@
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part_2
partition_columns part
+ partition_columns.types string
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -571,6 +579,7 @@
numFiles 2
numRows 500
partition_columns part
+ partition_columns.types string
rawDataSize 5312
serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
serialization.format 1
@@ -590,6 +599,7 @@
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part_1
partition_columns part
+ partition_columns.types string
serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -792,6 +802,7 @@
numFiles 2
numRows 500
partition_columns part
+ partition_columns.types string
rawDataSize 5312
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
@@ -811,6 +822,7 @@
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part_2
partition_columns part
+ partition_columns.types string
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -853,6 +865,7 @@
numFiles 2
numRows 500
partition_columns part
+ partition_columns.types string
rawDataSize 5312
serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
serialization.format 1
@@ -872,6 +885,7 @@
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part_1
partition_columns part
+ partition_columns.types string
serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1076,6 +1090,7 @@
numFiles 2
numRows 500
partition_columns part
+ partition_columns.types string
rawDataSize 5312
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
@@ -1095,6 +1110,7 @@
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part_2
partition_columns part
+ partition_columns.types string
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1137,6 +1153,7 @@
numFiles 2
numRows 500
partition_columns part
+ partition_columns.types string
rawDataSize 5312
serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
serialization.format 1
@@ -1156,6 +1173,7 @@
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part_1
partition_columns part
+ partition_columns.types string
serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/union22.q.out
===================================================================
--- ql/src/test/results/clientpositive/union22.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/union22.q.out (working copy)
@@ -263,6 +263,7 @@
numFiles 1
numRows 500
partition_columns ds
+ partition_columns.types string
rawDataSize 16936
serialization.ddl struct dst_union22_delta { string k0, string k1, string k2, string k3, string k4, string k5}
serialization.format 1
@@ -281,6 +282,7 @@
#### A masked pattern was here ####
name default.dst_union22_delta
partition_columns ds
+ partition_columns.types string
serialization.ddl struct dst_union22_delta { string k0, string k1, string k2, string k3, string k4, string k5}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -381,6 +383,7 @@
numFiles 1
numRows 500
partition_columns ds
+ partition_columns.types string
rawDataSize 11124
serialization.ddl struct dst_union22 { string k1, string k2, string k3, string k4}
serialization.format 1
@@ -399,6 +402,7 @@
#### A masked pattern was here ####
name default.dst_union22
partition_columns ds
+ partition_columns.types string
serialization.ddl struct dst_union22 { string k1, string k2, string k3, string k4}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -424,6 +428,7 @@
numFiles 1
numRows 500
partition_columns ds
+ partition_columns.types string
rawDataSize 16936
serialization.ddl struct dst_union22_delta { string k0, string k1, string k2, string k3, string k4, string k5}
serialization.format 1
@@ -442,6 +447,7 @@
#### A masked pattern was here ####
name default.dst_union22_delta
partition_columns ds
+ partition_columns.types string
serialization.ddl struct dst_union22_delta { string k0, string k1, string k2, string k3, string k4, string k5}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -482,6 +488,7 @@
#### A masked pattern was here ####
name default.dst_union22
partition_columns ds
+ partition_columns.types string
serialization.ddl struct dst_union22 { string k1, string k2, string k3, string k4}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -528,6 +535,7 @@
#### A masked pattern was here ####
name default.dst_union22
partition_columns ds
+ partition_columns.types string
serialization.ddl struct dst_union22 { string k1, string k2, string k3, string k4}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -578,6 +586,7 @@
numFiles 1
numRows 500
partition_columns ds
+ partition_columns.types string
rawDataSize 16936
serialization.ddl struct dst_union22_delta { string k0, string k1, string k2, string k3, string k4, string k5}
serialization.format 1
@@ -596,6 +605,7 @@
#### A masked pattern was here ####
name default.dst_union22_delta
partition_columns ds
+ partition_columns.types string
serialization.ddl struct dst_union22_delta { string k0, string k1, string k2, string k3, string k4, string k5}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -625,6 +635,7 @@
#### A masked pattern was here ####
name default.dst_union22
partition_columns ds
+ partition_columns.types string
serialization.ddl struct dst_union22 { string k1, string k2, string k3, string k4}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -694,6 +705,7 @@
numFiles 1
numRows 500
partition_columns ds
+ partition_columns.types string
rawDataSize 11124
serialization.ddl struct dst_union22 { string k1, string k2, string k3, string k4}
serialization.format 1
@@ -712,6 +724,7 @@
#### A masked pattern was here ####
name default.dst_union22
partition_columns ds
+ partition_columns.types string
serialization.ddl struct dst_union22 { string k1, string k2, string k3, string k4}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -737,6 +750,7 @@
numFiles 1
numRows 500
partition_columns ds
+ partition_columns.types string
rawDataSize 16936
serialization.ddl struct dst_union22_delta { string k0, string k1, string k2, string k3, string k4, string k5}
serialization.format 1
@@ -755,6 +769,7 @@
#### A masked pattern was here ####
name default.dst_union22_delta
partition_columns ds
+ partition_columns.types string
serialization.ddl struct dst_union22_delta { string k0, string k1, string k2, string k3, string k4, string k5}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/join32.q.out
===================================================================
--- ql/src/test/results/clientpositive/join32.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/join32.q.out (working copy)
@@ -189,6 +189,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -207,6 +208,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -335,6 +337,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -353,6 +356,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/input_part1.q.out
===================================================================
--- ql/src/test/results/clientpositive/input_part1.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/input_part1.q.out (working copy)
@@ -143,6 +143,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -161,6 +162,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/auto_sortmerge_join_8.q.out
===================================================================
--- ql/src/test/results/clientpositive/auto_sortmerge_join_8.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/auto_sortmerge_join_8.q.out (working copy)
@@ -205,6 +205,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -225,6 +226,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -251,6 +253,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -271,6 +274,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -423,6 +427,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -443,6 +448,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -469,6 +475,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -489,6 +496,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -650,6 +658,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
@@ -670,6 +679,7 @@
#### A masked pattern was here ####
name default.bucket_small
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -694,6 +704,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
@@ -714,6 +725,7 @@
#### A masked pattern was here ####
name default.bucket_small
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -748,6 +760,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -768,6 +781,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -794,6 +808,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -814,6 +829,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -839,6 +855,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
@@ -859,6 +876,7 @@
#### A masked pattern was here ####
name default.bucket_small
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -884,6 +902,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
@@ -904,6 +923,7 @@
#### A masked pattern was here ####
name default.bucket_small
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -994,6 +1014,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -1014,6 +1035,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1039,6 +1061,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -1059,6 +1082,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1093,6 +1117,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -1113,6 +1138,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1139,6 +1165,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -1159,6 +1186,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1184,6 +1212,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
@@ -1204,6 +1233,7 @@
#### A masked pattern was here ####
name default.bucket_small
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1229,6 +1259,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
@@ -1249,6 +1280,7 @@
#### A masked pattern was here ####
name default.bucket_small
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1337,6 +1369,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -1357,6 +1390,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1383,6 +1417,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -1403,6 +1438,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/columnstats_partlvl.q.out
===================================================================
--- ql/src/test/results/clientpositive/columnstats_partlvl.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/columnstats_partlvl.q.out (working copy)
@@ -142,6 +142,7 @@
numFiles 1
numRows 0
partition_columns employeesalary
+ partition_columns.types double
rawDataSize 0
serialization.ddl struct employee_part { i32 employeeid, string employeename}
serialization.format |
@@ -161,6 +162,7 @@
#### A masked pattern was here ####
name default.employee_part
partition_columns employeesalary
+ partition_columns.types double
serialization.ddl struct employee_part { i32 employeeid, string employeename}
serialization.format |
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -332,6 +334,7 @@
numFiles 1
numRows 0
partition_columns employeesalary
+ partition_columns.types double
rawDataSize 0
serialization.ddl struct employee_part { i32 employeeid, string employeename}
serialization.format |
@@ -351,6 +354,7 @@
#### A masked pattern was here ####
name default.employee_part
partition_columns employeesalary
+ partition_columns.types double
serialization.ddl struct employee_part { i32 employeeid, string employeename}
serialization.format |
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/auto_sortmerge_join_3.q.out
===================================================================
--- ql/src/test/results/clientpositive/auto_sortmerge_join_3.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/auto_sortmerge_join_3.q.out (working copy)
@@ -172,6 +172,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -192,6 +193,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -341,6 +343,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -361,6 +364,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -517,6 +521,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
@@ -537,6 +542,7 @@
#### A masked pattern was here ####
name default.bucket_small
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -561,6 +567,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
@@ -581,6 +588,7 @@
#### A masked pattern was here ####
name default.bucket_small
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -615,6 +623,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -635,6 +644,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -660,6 +670,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
@@ -680,6 +691,7 @@
#### A masked pattern was here ####
name default.bucket_small
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -705,6 +717,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
@@ -725,6 +738,7 @@
#### A masked pattern was here ####
name default.bucket_small
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -814,6 +828,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -834,6 +849,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -868,6 +884,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -888,6 +905,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -913,6 +931,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
@@ -933,6 +952,7 @@
#### A masked pattern was here ####
name default.bucket_small
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -958,6 +978,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
@@ -978,6 +999,7 @@
#### A masked pattern was here ####
name default.bucket_small
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1066,6 +1088,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -1086,6 +1109,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/sample8.q.out
===================================================================
--- ql/src/test/results/clientpositive/sample8.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/sample8.q.out (working copy)
@@ -171,6 +171,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -189,6 +190,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -215,6 +217,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -233,6 +236,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -259,6 +263,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -277,6 +282,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -303,6 +309,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -321,6 +328,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/transform_ppr2.q.out
===================================================================
--- ql/src/test/results/clientpositive/transform_ppr2.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/transform_ppr2.q.out (working copy)
@@ -155,6 +155,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -173,6 +174,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -199,6 +201,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -217,6 +220,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/union_ppr.q.out
===================================================================
--- ql/src/test/results/clientpositive/union_ppr.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/union_ppr.q.out (working copy)
@@ -182,6 +182,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -200,6 +201,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -226,6 +228,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -244,6 +247,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/ppd_vc.q.out
===================================================================
--- ql/src/test/results/clientpositive/ppd_vc.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/ppd_vc.q.out (working copy)
@@ -92,6 +92,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -110,6 +111,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -136,6 +138,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -154,6 +157,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -180,6 +184,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -198,6 +203,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -224,6 +230,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -242,6 +249,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -493,6 +501,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -511,6 +520,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -537,6 +547,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -555,6 +566,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -581,6 +593,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -599,6 +612,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -625,6 +639,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -643,6 +658,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/bucketcontext_6.q.out
===================================================================
--- ql/src/test/results/clientpositive/bucketcontext_6.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/bucketcontext_6.q.out (working copy)
@@ -198,6 +198,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -218,6 +219,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -244,6 +246,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -264,6 +267,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -420,6 +424,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -440,6 +445,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -466,6 +472,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -486,6 +493,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/dynamic_partition_skip_default.q.out
===================================================================
--- ql/src/test/results/clientpositive/dynamic_partition_skip_default.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/dynamic_partition_skip_default.q.out (working copy)
@@ -141,6 +141,7 @@
numFiles 1
numRows 1
partition_columns partcol1/partcol2
+ partition_columns.types string:string
rawDataSize 1
serialization.ddl struct dynamic_part_table { string intcol}
serialization.format 1
@@ -159,6 +160,7 @@
#### A masked pattern was here ####
name default.dynamic_part_table
partition_columns partcol1/partcol2
+ partition_columns.types string:string
serialization.ddl struct dynamic_part_table { string intcol}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -267,6 +269,7 @@
numFiles 1
numRows 1
partition_columns partcol1/partcol2
+ partition_columns.types string:string
rawDataSize 1
serialization.ddl struct dynamic_part_table { string intcol}
serialization.format 1
@@ -285,6 +288,7 @@
#### A masked pattern was here ####
name default.dynamic_part_table
partition_columns partcol1/partcol2
+ partition_columns.types string:string
serialization.ddl struct dynamic_part_table { string intcol}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -403,6 +407,7 @@
numFiles 1
numRows 1
partition_columns partcol1/partcol2
+ partition_columns.types string:string
rawDataSize 1
serialization.ddl struct dynamic_part_table { string intcol}
serialization.format 1
@@ -421,6 +426,7 @@
#### A masked pattern was here ####
name default.dynamic_part_table
partition_columns partcol1/partcol2
+ partition_columns.types string:string
serialization.ddl struct dynamic_part_table { string intcol}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -447,6 +453,7 @@
numFiles 1
numRows 1
partition_columns partcol1/partcol2
+ partition_columns.types string:string
rawDataSize 1
serialization.ddl struct dynamic_part_table { string intcol}
serialization.format 1
@@ -465,6 +472,7 @@
#### A masked pattern was here ####
name default.dynamic_part_table
partition_columns partcol1/partcol2
+ partition_columns.types string:string
serialization.ddl struct dynamic_part_table { string intcol}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/stats12.q.out
===================================================================
--- ql/src/test/results/clientpositive/stats12.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/stats12.q.out (working copy)
@@ -95,6 +95,7 @@
numFiles 1
numRows -1
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize -1
serialization.ddl struct analyze_srcpart { string key, string value}
serialization.format 1
@@ -113,6 +114,7 @@
#### A masked pattern was here ####
name default.analyze_srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct analyze_srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -139,6 +141,7 @@
numFiles 1
numRows -1
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize -1
serialization.ddl struct analyze_srcpart { string key, string value}
serialization.format 1
@@ -157,6 +160,7 @@
#### A masked pattern was here ####
name default.analyze_srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct analyze_srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/router_join_ppr.q.out
===================================================================
--- ql/src/test/results/clientpositive/router_join_ppr.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/router_join_ppr.q.out (working copy)
@@ -204,6 +204,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -222,6 +223,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -248,6 +250,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -266,6 +269,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -292,6 +296,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -310,6 +315,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -336,6 +342,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -354,6 +361,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -664,6 +672,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -682,6 +691,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -708,6 +718,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -726,6 +737,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1025,6 +1037,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -1043,6 +1056,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1069,6 +1083,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -1087,6 +1102,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1386,6 +1402,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -1404,6 +1421,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1430,6 +1448,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -1448,6 +1467,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1474,6 +1494,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -1492,6 +1513,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1518,6 +1540,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -1536,6 +1559,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/bucketcontext_1.q.out
===================================================================
--- ql/src/test/results/clientpositive/bucketcontext_1.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/bucketcontext_1.q.out (working copy)
@@ -198,6 +198,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
@@ -218,6 +219,7 @@
#### A masked pattern was here ####
name default.bucket_small
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -260,6 +262,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -280,6 +283,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -306,6 +310,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -326,6 +331,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -484,6 +490,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -504,6 +511,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -530,6 +538,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -550,6 +559,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/input42.q.out
===================================================================
--- ql/src/test/results/clientpositive/input42.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/input42.q.out (working copy)
@@ -83,6 +83,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -101,6 +102,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -127,6 +129,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -145,6 +148,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1291,6 +1295,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -1309,6 +1314,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1335,6 +1341,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -1353,6 +1360,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1878,6 +1886,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -1896,6 +1905,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1922,6 +1932,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -1940,6 +1951,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/bucketmapjoin10.q.out
===================================================================
--- ql/src/test/results/clientpositive/bucketmapjoin10.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/bucketmapjoin10.q.out (working copy)
@@ -248,6 +248,7 @@
numFiles 3
numRows 0
partition_columns part
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
@@ -267,6 +268,7 @@
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part_2
partition_columns part
+ partition_columns.types string
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -291,6 +293,7 @@
numFiles 2
numRows 0
partition_columns part
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
@@ -310,6 +313,7 @@
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part_2
partition_columns part
+ partition_columns.types string
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -345,6 +349,7 @@
numFiles 2
numRows 0
partition_columns part
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
serialization.format 1
@@ -364,6 +369,7 @@
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part_1
partition_columns part
+ partition_columns.types string
serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -390,6 +396,7 @@
numFiles 3
numRows 0
partition_columns part
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
serialization.format 1
@@ -409,6 +416,7 @@
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part_1
partition_columns part
+ partition_columns.types string
serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/bucketmapjoin1.q.out
===================================================================
--- ql/src/test/results/clientpositive/bucketmapjoin1.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/bucketmapjoin1.q.out (working copy)
@@ -496,6 +496,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
serialization.format 1
@@ -515,6 +516,7 @@
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part
partition_columns ds
+ partition_columns.types string
serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1097,6 +1099,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
serialization.format 1
@@ -1116,6 +1119,7 @@
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part
partition_columns ds
+ partition_columns.types string
serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/sample10.q.out
===================================================================
--- ql/src/test/results/clientpositive/sample10.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/sample10.q.out (working copy)
@@ -145,6 +145,7 @@
numFiles 4
numRows 10
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 60
serialization.ddl struct srcpartbucket { string key, string value}
serialization.format 1
@@ -164,6 +165,7 @@
#### A masked pattern was here ####
name default.srcpartbucket
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpartbucket { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
@@ -191,6 +193,7 @@
numFiles 4
numRows 10
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 60
serialization.ddl struct srcpartbucket { string key, string value}
serialization.format 1
@@ -210,6 +213,7 @@
#### A masked pattern was here ####
name default.srcpartbucket
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpartbucket { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
@@ -237,6 +241,7 @@
numFiles 4
numRows 10
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 60
serialization.ddl struct srcpartbucket { string key, string value}
serialization.format 1
@@ -256,6 +261,7 @@
#### A masked pattern was here ####
name default.srcpartbucket
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpartbucket { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
@@ -283,6 +289,7 @@
numFiles 4
numRows 10
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 60
serialization.ddl struct srcpartbucket { string key, string value}
serialization.format 1
@@ -302,6 +309,7 @@
#### A masked pattern was here ####
name default.srcpartbucket
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpartbucket { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
Index: ql/src/test/results/clientpositive/louter_join_ppr.q.out
===================================================================
--- ql/src/test/results/clientpositive/louter_join_ppr.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/louter_join_ppr.q.out (working copy)
@@ -204,6 +204,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -222,6 +223,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -248,6 +250,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -266,6 +269,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -565,6 +569,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -583,6 +588,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -609,6 +615,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -627,6 +634,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -653,6 +661,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -671,6 +680,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -697,6 +707,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -715,6 +726,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1025,6 +1037,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -1043,6 +1056,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1069,6 +1083,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -1087,6 +1102,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1113,6 +1129,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -1131,6 +1148,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1157,6 +1175,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -1175,6 +1194,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1480,6 +1500,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -1498,6 +1519,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1524,6 +1546,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -1542,6 +1565,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/bucketcontext_8.q.out
===================================================================
--- ql/src/test/results/clientpositive/bucketcontext_8.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/bucketcontext_8.q.out (working copy)
@@ -215,6 +215,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
@@ -235,6 +236,7 @@
#### A masked pattern was here ####
name default.bucket_small
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -259,6 +261,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
@@ -279,6 +282,7 @@
#### A masked pattern was here ####
name default.bucket_small
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -321,6 +325,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -341,6 +346,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -367,6 +373,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -387,6 +394,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -547,6 +555,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -567,6 +576,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -593,6 +603,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -613,6 +624,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/auto_sortmerge_join_12.q.out
===================================================================
--- ql/src/test/results/clientpositive/auto_sortmerge_join_12.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/auto_sortmerge_join_12.q.out (working copy)
@@ -274,6 +274,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
@@ -294,6 +295,7 @@
#### A masked pattern was here ####
name default.bucket_small
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -323,6 +325,7 @@
numFiles 3
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_medium { string key, string value}
serialization.format 1
@@ -343,6 +346,7 @@
#### A masked pattern was here ####
name default.bucket_medium
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_medium { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -372,6 +376,7 @@
numFiles 3
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_medium { string key, string value}
serialization.format 1
@@ -392,6 +397,7 @@
#### A masked pattern was here ####
name default.bucket_medium
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_medium { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -437,6 +443,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -457,6 +464,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -483,6 +491,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -503,6 +512,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -529,6 +539,7 @@
numFiles 3
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_medium { string key, string value}
serialization.format 1
@@ -549,6 +560,7 @@
#### A masked pattern was here ####
name default.bucket_medium
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_medium { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -575,6 +587,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
@@ -595,6 +608,7 @@
#### A masked pattern was here ####
name default.bucket_small
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/rand_partitionpruner2.q.out
===================================================================
--- ql/src/test/results/clientpositive/rand_partitionpruner2.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/rand_partitionpruner2.q.out (working copy)
@@ -124,6 +124,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -142,6 +143,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -168,6 +170,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -186,6 +189,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/bucketcontext_3.q.out
===================================================================
--- ql/src/test/results/clientpositive/bucketcontext_3.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/bucketcontext_3.q.out (working copy)
@@ -182,6 +182,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
@@ -202,6 +203,7 @@
#### A masked pattern was here ####
name default.bucket_small
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -226,6 +228,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
@@ -246,6 +249,7 @@
#### A masked pattern was here ####
name default.bucket_small
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -288,6 +292,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -308,6 +313,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -465,6 +471,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -485,6 +492,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/bucket3.q.out
===================================================================
--- ql/src/test/results/clientpositive/bucket3.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/bucket3.q.out (working copy)
@@ -134,6 +134,7 @@
#### A masked pattern was here ####
name default.bucket3_1
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket3_1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -163,6 +164,7 @@
#### A masked pattern was here ####
name default.bucket3_1
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket3_1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/groupby_ppr_multi_distinct.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby_ppr_multi_distinct.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/groupby_ppr_multi_distinct.q.out (working copy)
@@ -152,6 +152,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -170,6 +171,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -196,6 +198,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -214,6 +217,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/sort_merge_join_desc_6.q.out
===================================================================
--- ql/src/test/results/clientpositive/sort_merge_join_desc_6.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/sort_merge_join_desc_6.q.out (working copy)
@@ -181,6 +181,7 @@
numFiles 2
numRows 500
partition_columns part
+ partition_columns.types string
rawDataSize 5312
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
@@ -200,6 +201,7 @@
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part_2
partition_columns part
+ partition_columns.types string
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -242,6 +244,7 @@
numFiles 2
numRows 500
partition_columns part
+ partition_columns.types string
rawDataSize 5312
serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
serialization.format 1
@@ -262,6 +265,7 @@
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part_1
partition_columns part
+ partition_columns.types string
serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/bucketmapjoin8.q.out
===================================================================
--- ql/src/test/results/clientpositive/bucketmapjoin8.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/bucketmapjoin8.q.out (working copy)
@@ -183,6 +183,7 @@
numFiles 2
numRows 0
partition_columns part
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
@@ -202,6 +203,7 @@
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part_2
partition_columns part
+ partition_columns.types string
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -244,6 +246,7 @@
numFiles 2
numRows 0
partition_columns part
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
serialization.format 1
@@ -263,6 +266,7 @@
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part_1
partition_columns part
+ partition_columns.types string
serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -461,6 +465,7 @@
numFiles 2
numRows 0
partition_columns part
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
@@ -480,6 +485,7 @@
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part_2
partition_columns part
+ partition_columns.types string
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -522,6 +528,7 @@
numFiles 2
numRows 0
partition_columns part
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
serialization.format 1
@@ -541,6 +548,7 @@
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part_1
partition_columns part
+ partition_columns.types string
serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/bucketmapjoin12.q.out
===================================================================
--- ql/src/test/results/clientpositive/bucketmapjoin12.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/bucketmapjoin12.q.out (working copy)
@@ -217,6 +217,7 @@
numFiles 2
numRows 0
partition_columns part
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
@@ -235,6 +236,7 @@
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part_2
partition_columns part
+ partition_columns.types string
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -277,6 +279,7 @@
numFiles 2
numRows 0
partition_columns part
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
serialization.format 1
@@ -296,6 +299,7 @@
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part_1
partition_columns part
+ partition_columns.types string
serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -484,6 +488,7 @@
numFiles 2
numRows 0
partition_columns part
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part_3 { i32 key, string value}
serialization.format 1
@@ -503,6 +508,7 @@
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part_3
partition_columns part
+ partition_columns.types string
serialization.ddl struct srcbucket_mapjoin_part_3 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -538,6 +544,7 @@
numFiles 2
numRows 0
partition_columns part
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
serialization.format 1
@@ -557,6 +564,7 @@
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part_1
partition_columns part
+ partition_columns.types string
serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/bucketmapjoin3.q.out
===================================================================
--- ql/src/test/results/clientpositive/bucketmapjoin3.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/bucketmapjoin3.q.out (working copy)
@@ -267,6 +267,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
serialization.format 1
@@ -286,6 +287,7 @@
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part
partition_columns ds
+ partition_columns.types string
serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -328,6 +330,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
@@ -347,6 +350,7 @@
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part_2
partition_columns ds
+ partition_columns.types string
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -861,6 +865,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
@@ -880,6 +885,7 @@
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part_2
partition_columns ds
+ partition_columns.types string
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -922,6 +928,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
serialization.format 1
@@ -941,6 +948,7 @@
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part
partition_columns ds
+ partition_columns.types string
serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/annotate_stats_part.q.out
===================================================================
--- ql/src/test/results/clientpositive/annotate_stats_part.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/annotate_stats_part.q.out (working copy)
@@ -149,6 +149,7 @@
numFiles 1
numRows -1
partition_columns year
+ partition_columns.types string
rawDataSize -1
serialization.ddl struct loc_orc { string state, i32 locid, i64 zip}
serialization.format 1
@@ -167,6 +168,7 @@
#### A masked pattern was here ####
name default.loc_orc
partition_columns year
+ partition_columns.types string
serialization.ddl struct loc_orc { string state, i32 locid, i64 zip}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -190,6 +192,7 @@
numFiles 1
numRows -1
partition_columns year
+ partition_columns.types string
rawDataSize -1
serialization.ddl struct loc_orc { string state, i32 locid, i64 zip}
serialization.format 1
@@ -208,6 +211,7 @@
#### A masked pattern was here ####
name default.loc_orc
partition_columns year
+ partition_columns.types string
serialization.ddl struct loc_orc { string state, i32 locid, i64 zip}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -303,6 +307,7 @@
numFiles 1
numRows -1
partition_columns year
+ partition_columns.types string
rawDataSize -1
serialization.ddl struct loc_orc { string state, i32 locid, i64 zip}
serialization.format 1
@@ -321,6 +326,7 @@
#### A masked pattern was here ####
name default.loc_orc
partition_columns year
+ partition_columns.types string
serialization.ddl struct loc_orc { string state, i32 locid, i64 zip}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -391,6 +397,7 @@
numFiles 1
numRows 7
partition_columns year
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct loc_orc { string state, i32 locid, i64 zip}
serialization.format 1
@@ -409,6 +416,7 @@
#### A masked pattern was here ####
name default.loc_orc
partition_columns year
+ partition_columns.types string
serialization.ddl struct loc_orc { string state, i32 locid, i64 zip}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -432,6 +440,7 @@
numFiles 1
numRows -1
partition_columns year
+ partition_columns.types string
rawDataSize -1
serialization.ddl struct loc_orc { string state, i32 locid, i64 zip}
serialization.format 1
@@ -450,6 +459,7 @@
#### A masked pattern was here ####
name default.loc_orc
partition_columns year
+ partition_columns.types string
serialization.ddl struct loc_orc { string state, i32 locid, i64 zip}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -525,6 +535,7 @@
numFiles 1
numRows 7
partition_columns year
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct loc_orc { string state, i32 locid, i64 zip}
serialization.format 1
@@ -543,6 +554,7 @@
#### A masked pattern was here ####
name default.loc_orc
partition_columns year
+ partition_columns.types string
serialization.ddl struct loc_orc { string state, i32 locid, i64 zip}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -642,6 +654,7 @@
numFiles 1
numRows 1
partition_columns year
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct loc_orc { string state, i32 locid, i64 zip}
serialization.format 1
@@ -660,6 +673,7 @@
#### A masked pattern was here ####
name default.loc_orc
partition_columns year
+ partition_columns.types string
serialization.ddl struct loc_orc { string state, i32 locid, i64 zip}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -730,6 +744,7 @@
numFiles 1
numRows 7
partition_columns year
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct loc_orc { string state, i32 locid, i64 zip}
serialization.format 1
@@ -748,6 +763,7 @@
#### A masked pattern was here ####
name default.loc_orc
partition_columns year
+ partition_columns.types string
serialization.ddl struct loc_orc { string state, i32 locid, i64 zip}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -771,6 +787,7 @@
numFiles 1
numRows 1
partition_columns year
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct loc_orc { string state, i32 locid, i64 zip}
serialization.format 1
@@ -789,6 +806,7 @@
#### A masked pattern was here ####
name default.loc_orc
partition_columns year
+ partition_columns.types string
serialization.ddl struct loc_orc { string state, i32 locid, i64 zip}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -869,6 +887,7 @@
numFiles 1
numRows 7
partition_columns year
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct loc_orc { string state, i32 locid, i64 zip}
serialization.format 1
@@ -887,6 +906,7 @@
#### A masked pattern was here ####
name default.loc_orc
partition_columns year
+ partition_columns.types string
serialization.ddl struct loc_orc { string state, i32 locid, i64 zip}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -910,6 +930,7 @@
numFiles 1
numRows 1
partition_columns year
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct loc_orc { string state, i32 locid, i64 zip}
serialization.format 1
@@ -928,6 +949,7 @@
#### A masked pattern was here ####
name default.loc_orc
partition_columns year
+ partition_columns.types string
serialization.ddl struct loc_orc { string state, i32 locid, i64 zip}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -1113,6 +1135,7 @@
numFiles 1
numRows 7
partition_columns year
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct loc_orc { string state, i32 locid, i64 zip}
serialization.format 1
@@ -1131,6 +1154,7 @@
#### A masked pattern was here ####
name default.loc_orc
partition_columns year
+ partition_columns.types string
serialization.ddl struct loc_orc { string state, i32 locid, i64 zip}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -1156,6 +1180,7 @@
numFiles 1
numRows 1
partition_columns year
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct loc_orc { string state, i32 locid, i64 zip}
serialization.format 1
@@ -1174,6 +1199,7 @@
#### A masked pattern was here ####
name default.loc_orc
partition_columns year
+ partition_columns.types string
serialization.ddl struct loc_orc { string state, i32 locid, i64 zip}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -1276,6 +1302,7 @@
numFiles 1
numRows 7
partition_columns year
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct loc_orc { string state, i32 locid, i64 zip}
serialization.format 1
@@ -1294,6 +1321,7 @@
#### A masked pattern was here ####
name default.loc_orc
partition_columns year
+ partition_columns.types string
serialization.ddl struct loc_orc { string state, i32 locid, i64 zip}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -1319,6 +1347,7 @@
numFiles 1
numRows 1
partition_columns year
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct loc_orc { string state, i32 locid, i64 zip}
serialization.format 1
@@ -1337,6 +1366,7 @@
#### A masked pattern was here ####
name default.loc_orc
partition_columns year
+ partition_columns.types string
serialization.ddl struct loc_orc { string state, i32 locid, i64 zip}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -1444,6 +1474,7 @@
numFiles 1
numRows 7
partition_columns year
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct loc_orc { string state, i32 locid, i64 zip}
serialization.format 1
@@ -1462,6 +1493,7 @@
#### A masked pattern was here ####
name default.loc_orc
partition_columns year
+ partition_columns.types string
serialization.ddl struct loc_orc { string state, i32 locid, i64 zip}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -1487,6 +1519,7 @@
numFiles 1
numRows 1
partition_columns year
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct loc_orc { string state, i32 locid, i64 zip}
serialization.format 1
@@ -1505,6 +1538,7 @@
#### A masked pattern was here ####
name default.loc_orc
partition_columns year
+ partition_columns.types string
serialization.ddl struct loc_orc { string state, i32 locid, i64 zip}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -1615,6 +1649,7 @@
numFiles 1
numRows 7
partition_columns year
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct loc_orc { string state, i32 locid, i64 zip}
serialization.format 1
@@ -1633,6 +1668,7 @@
#### A masked pattern was here ####
name default.loc_orc
partition_columns year
+ partition_columns.types string
serialization.ddl struct loc_orc { string state, i32 locid, i64 zip}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -1742,6 +1778,7 @@
numFiles 1
numRows 1
partition_columns year
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct loc_orc { string state, i32 locid, i64 zip}
serialization.format 1
@@ -1760,6 +1797,7 @@
#### A masked pattern was here ####
name default.loc_orc
partition_columns year
+ partition_columns.types string
serialization.ddl struct loc_orc { string state, i32 locid, i64 zip}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -1826,6 +1864,7 @@
numFiles 1
numRows 7
partition_columns year
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct loc_orc { string state, i32 locid, i64 zip}
serialization.format 1
@@ -1844,6 +1883,7 @@
#### A masked pattern was here ####
name default.loc_orc
partition_columns year
+ partition_columns.types string
serialization.ddl struct loc_orc { string state, i32 locid, i64 zip}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -1867,6 +1907,7 @@
numFiles 1
numRows 1
partition_columns year
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct loc_orc { string state, i32 locid, i64 zip}
serialization.format 1
@@ -1885,6 +1926,7 @@
#### A masked pattern was here ####
name default.loc_orc
partition_columns year
+ partition_columns.types string
serialization.ddl struct loc_orc { string state, i32 locid, i64 zip}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -2006,6 +2048,7 @@
numFiles 1
numRows 7
partition_columns year
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct loc_orc { string state, i32 locid, i64 zip}
serialization.format 1
@@ -2024,6 +2067,7 @@
#### A masked pattern was here ####
name default.loc_orc
partition_columns year
+ partition_columns.types string
serialization.ddl struct loc_orc { string state, i32 locid, i64 zip}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -2140,6 +2184,7 @@
numFiles 1
numRows 7
partition_columns year
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct loc_orc { string state, i32 locid, i64 zip}
serialization.format 1
@@ -2158,6 +2203,7 @@
#### A masked pattern was here ####
name default.loc_orc
partition_columns year
+ partition_columns.types string
serialization.ddl struct loc_orc { string state, i32 locid, i64 zip}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -2285,6 +2331,7 @@
numFiles 1
numRows 7
partition_columns year
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct loc_orc { string state, i32 locid, i64 zip}
serialization.format 1
@@ -2303,6 +2350,7 @@
#### A masked pattern was here ####
name default.loc_orc
partition_columns year
+ partition_columns.types string
serialization.ddl struct loc_orc { string state, i32 locid, i64 zip}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
Index: ql/src/test/results/clientpositive/smb_mapjoin_12.q.out
===================================================================
--- ql/src/test/results/clientpositive/smb_mapjoin_12.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/smb_mapjoin_12.q.out (working copy)
@@ -191,6 +191,7 @@
#### A masked pattern was here ####
name default.test_table3
partition_columns ds
+ partition_columns.types string
serialization.ddl struct test_table3 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -222,6 +223,7 @@
numFiles 16
numRows 500
partition_columns ds
+ partition_columns.types string
rawDataSize 5312
serialization.ddl struct test_table1 { i32 key, string value}
serialization.format 1
@@ -242,6 +244,7 @@
#### A masked pattern was here ####
name default.test_table1
partition_columns ds
+ partition_columns.types string
serialization.ddl struct test_table1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -272,6 +275,7 @@
#### A masked pattern was here ####
name default.test_table3
partition_columns ds
+ partition_columns.types string
serialization.ddl struct test_table3 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -474,6 +478,7 @@
#### A masked pattern was here ####
name default.test_table3
partition_columns ds
+ partition_columns.types string
serialization.ddl struct test_table3 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -505,6 +510,7 @@
numFiles 16
numRows 3084
partition_columns ds
+ partition_columns.types string
rawDataSize 32904
serialization.ddl struct test_table3 { i32 key, string value}
serialization.format 1
@@ -525,6 +531,7 @@
#### A masked pattern was here ####
name default.test_table3
partition_columns ds
+ partition_columns.types string
serialization.ddl struct test_table3 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -555,6 +562,7 @@
#### A masked pattern was here ####
name default.test_table3
partition_columns ds
+ partition_columns.types string
serialization.ddl struct test_table3 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/metadataonly1.q.out
===================================================================
--- ql/src/test/results/clientpositive/metadataonly1.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/metadataonly1.q.out (working copy)
@@ -155,6 +155,7 @@
#### A masked pattern was here ####
name default.test1
partition_columns ds
+ partition_columns.types string
serialization.ddl struct test1 { i32 a, double b}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
@@ -171,6 +172,7 @@
#### A masked pattern was here ####
name default.test1
partition_columns ds
+ partition_columns.types string
serialization.ddl struct test1 { i32 a, double b}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -297,6 +299,7 @@
#### A masked pattern was here ####
name default.test1
partition_columns ds
+ partition_columns.types string
serialization.ddl struct test1 { i32 a, double b}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
@@ -313,6 +316,7 @@
#### A masked pattern was here ####
name default.test1
partition_columns ds
+ partition_columns.types string
serialization.ddl struct test1 { i32 a, double b}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -437,6 +441,7 @@
#### A masked pattern was here ####
name default.test1
partition_columns ds
+ partition_columns.types string
serialization.ddl struct test1 { i32 a, double b}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -453,6 +458,7 @@
#### A masked pattern was here ####
name default.test1
partition_columns ds
+ partition_columns.types string
serialization.ddl struct test1 { i32 a, double b}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -616,6 +622,7 @@
#### A masked pattern was here ####
name default.test1
partition_columns ds
+ partition_columns.types string
serialization.ddl struct test1 { i32 a, double b}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
@@ -632,6 +639,7 @@
#### A masked pattern was here ####
name default.test1
partition_columns ds
+ partition_columns.types string
serialization.ddl struct test1 { i32 a, double b}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -654,6 +662,7 @@
#### A masked pattern was here ####
name default.test1
partition_columns ds
+ partition_columns.types string
serialization.ddl struct test1 { i32 a, double b}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
@@ -670,6 +679,7 @@
#### A masked pattern was here ####
name default.test1
partition_columns ds
+ partition_columns.types string
serialization.ddl struct test1 { i32 a, double b}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -768,6 +778,7 @@
#### A masked pattern was here ####
name default.test1
partition_columns ds
+ partition_columns.types string
serialization.ddl struct test1 { i32 a, double b}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -784,6 +795,7 @@
#### A masked pattern was here ####
name default.test1
partition_columns ds
+ partition_columns.types string
serialization.ddl struct test1 { i32 a, double b}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -806,6 +818,7 @@
#### A masked pattern was here ####
name default.test1
partition_columns ds
+ partition_columns.types string
serialization.ddl struct test1 { i32 a, double b}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -822,6 +835,7 @@
#### A masked pattern was here ####
name default.test1
partition_columns ds
+ partition_columns.types string
serialization.ddl struct test1 { i32 a, double b}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1059,6 +1073,7 @@
#### A masked pattern was here ####
name default.test2
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct test2 { i32 a, double b}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
@@ -1075,6 +1090,7 @@
#### A masked pattern was here ####
name default.test2
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct test2 { i32 a, double b}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1098,6 +1114,7 @@
#### A masked pattern was here ####
name default.test2
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct test2 { i32 a, double b}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
@@ -1114,6 +1131,7 @@
#### A masked pattern was here ####
name default.test2
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct test2 { i32 a, double b}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1137,6 +1155,7 @@
#### A masked pattern was here ####
name default.test2
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct test2 { i32 a, double b}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
@@ -1153,6 +1172,7 @@
#### A masked pattern was here ####
name default.test2
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct test2 { i32 a, double b}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1294,6 +1314,7 @@
#### A masked pattern was here ####
name default.test2
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct test2 { i32 a, double b}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1310,6 +1331,7 @@
#### A masked pattern was here ####
name default.test2
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct test2 { i32 a, double b}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1333,6 +1355,7 @@
#### A masked pattern was here ####
name default.test2
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct test2 { i32 a, double b}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1349,6 +1372,7 @@
#### A masked pattern was here ####
name default.test2
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct test2 { i32 a, double b}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1372,6 +1396,7 @@
#### A masked pattern was here ####
name default.test2
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct test2 { i32 a, double b}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1388,6 +1413,7 @@
#### A masked pattern was here ####
name default.test2
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct test2 { i32 a, double b}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1519,6 +1545,7 @@
#### A masked pattern was here ####
name default.test1
partition_columns ds
+ partition_columns.types string
serialization.ddl struct test1 { i32 a, double b}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
@@ -1535,6 +1562,7 @@
#### A masked pattern was here ####
name default.test1
partition_columns ds
+ partition_columns.types string
serialization.ddl struct test1 { i32 a, double b}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1557,6 +1585,7 @@
#### A masked pattern was here ####
name default.test1
partition_columns ds
+ partition_columns.types string
serialization.ddl struct test1 { i32 a, double b}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
@@ -1573,6 +1602,7 @@
#### A masked pattern was here ####
name default.test1
partition_columns ds
+ partition_columns.types string
serialization.ddl struct test1 { i32 a, double b}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1765,6 +1795,7 @@
#### A masked pattern was here ####
name default.test2
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct test2 { i32 a, double b}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
@@ -1781,6 +1812,7 @@
#### A masked pattern was here ####
name default.test2
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct test2 { i32 a, double b}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1804,6 +1836,7 @@
#### A masked pattern was here ####
name default.test2
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct test2 { i32 a, double b}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
@@ -1820,6 +1853,7 @@
#### A masked pattern was here ####
name default.test2
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct test2 { i32 a, double b}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1843,6 +1877,7 @@
#### A masked pattern was here ####
name default.test2
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct test2 { i32 a, double b}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
@@ -1859,6 +1894,7 @@
#### A masked pattern was here ####
name default.test2
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct test2 { i32 a, double b}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1882,6 +1918,7 @@
#### A masked pattern was here ####
name default.test2
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct test2 { i32 a, double b}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
@@ -1898,6 +1935,7 @@
#### A masked pattern was here ####
name default.test2
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct test2 { i32 a, double b}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1921,6 +1959,7 @@
#### A masked pattern was here ####
name default.test2
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct test2 { i32 a, double b}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
@@ -1937,6 +1976,7 @@
#### A masked pattern was here ####
name default.test2
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct test2 { i32 a, double b}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/join32_lessSize.q.out
===================================================================
--- ql/src/test/results/clientpositive/join32_lessSize.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/join32_lessSize.q.out (working copy)
@@ -322,6 +322,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -340,6 +341,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -395,6 +397,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -413,6 +416,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -2052,6 +2056,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -2070,6 +2075,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -2125,6 +2131,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -2143,6 +2150,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -2653,6 +2661,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -2671,6 +2680,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -2726,6 +2736,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -2744,6 +2755,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/auto_sortmerge_join_7.q.out
===================================================================
--- ql/src/test/results/clientpositive/auto_sortmerge_join_7.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/auto_sortmerge_join_7.q.out (working copy)
@@ -205,6 +205,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -225,6 +226,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -251,6 +253,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -271,6 +274,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -423,6 +427,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -443,6 +448,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -469,6 +475,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -489,6 +496,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -648,6 +656,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
@@ -668,6 +677,7 @@
#### A masked pattern was here ####
name default.bucket_small
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -692,6 +702,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
@@ -712,6 +723,7 @@
#### A masked pattern was here ####
name default.bucket_small
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -746,6 +758,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -766,6 +779,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -792,6 +806,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -812,6 +827,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -837,6 +853,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
@@ -857,6 +874,7 @@
#### A masked pattern was here ####
name default.bucket_small
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -882,6 +900,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
@@ -902,6 +921,7 @@
#### A masked pattern was here ####
name default.bucket_small
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -992,6 +1012,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -1012,6 +1033,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1037,6 +1059,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -1057,6 +1080,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1091,6 +1115,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -1111,6 +1136,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1137,6 +1163,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -1157,6 +1184,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1182,6 +1210,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
@@ -1202,6 +1231,7 @@
#### A masked pattern was here ####
name default.bucket_small
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1227,6 +1257,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
@@ -1247,6 +1278,7 @@
#### A masked pattern was here ####
name default.bucket_small
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1335,6 +1367,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -1355,6 +1388,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1381,6 +1415,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -1401,6 +1436,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/outer_join_ppr.q.out
===================================================================
--- ql/src/test/results/clientpositive/outer_join_ppr.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/outer_join_ppr.q.out (working copy)
@@ -196,6 +196,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -214,6 +215,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -240,6 +242,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -258,6 +261,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -284,6 +288,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -302,6 +307,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -328,6 +334,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -346,6 +353,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -648,6 +656,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -666,6 +675,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -692,6 +702,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -710,6 +721,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -736,6 +748,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -754,6 +767,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -780,6 +794,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -798,6 +813,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/list_bucket_dml_10.q.out
===================================================================
--- ql/src/test/results/clientpositive/list_bucket_dml_10.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/list_bucket_dml_10.q.out (working copy)
@@ -119,6 +119,7 @@
columns.types string:string
#### A masked pattern was here ####
name default.list_bucketing_static_part
+ partition_columns.types string:string
serialization.ddl struct list_bucketing_static_part { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
@@ -205,6 +206,7 @@
columns.types string:string
#### A masked pattern was here ####
name default.list_bucketing_static_part
+ partition_columns.types string:string
serialization.ddl struct list_bucketing_static_part { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
@@ -232,6 +234,7 @@
columns.types string:string
#### A masked pattern was here ####
name default.list_bucketing_static_part
+ partition_columns.types string:string
serialization.ddl struct list_bucketing_static_part { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
@@ -247,6 +250,7 @@
columns.types string:string
#### A masked pattern was here ####
name default.list_bucketing_static_part
+ partition_columns.types string:string
serialization.ddl struct list_bucketing_static_part { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
@@ -273,6 +277,7 @@
columns.types string:string
#### A masked pattern was here ####
name default.list_bucketing_static_part
+ partition_columns.types string:string
serialization.ddl struct list_bucketing_static_part { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
@@ -288,6 +293,7 @@
columns.types string:string
#### A masked pattern was here ####
name default.list_bucketing_static_part
+ partition_columns.types string:string
serialization.ddl struct list_bucketing_static_part { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
Index: ql/src/test/results/clientpositive/bucketmapjoin_negative2.q.out
===================================================================
--- ql/src/test/results/clientpositive/bucketmapjoin_negative2.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/bucketmapjoin_negative2.q.out (working copy)
@@ -216,6 +216,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
@@ -235,6 +236,7 @@
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part_2
partition_columns ds
+ partition_columns.types string
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -259,6 +261,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
@@ -278,6 +281,7 @@
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part_2
partition_columns ds
+ partition_columns.types string
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/auto_sortmerge_join_2.q.out
===================================================================
--- ql/src/test/results/clientpositive/auto_sortmerge_join_2.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/auto_sortmerge_join_2.q.out (working copy)
@@ -172,6 +172,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -192,6 +193,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -218,6 +220,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -238,6 +241,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -397,6 +401,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
@@ -417,6 +422,7 @@
#### A masked pattern was here ####
name default.bucket_small
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -451,6 +457,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -471,6 +478,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -497,6 +505,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -517,6 +526,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -542,6 +552,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
@@ -562,6 +573,7 @@
#### A masked pattern was here ####
name default.bucket_small
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -652,6 +664,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -672,6 +685,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -697,6 +711,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -717,6 +732,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -751,6 +767,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -771,6 +788,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -797,6 +815,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -817,6 +836,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -842,6 +862,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
@@ -862,6 +883,7 @@
#### A masked pattern was here ####
name default.bucket_small
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_small { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -949,6 +971,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -969,6 +992,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -995,6 +1019,7 @@
numFiles 2
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
@@ -1015,6 +1040,7 @@
#### A masked pattern was here ####
name default.bucket_big
partition_columns ds
+ partition_columns.types string
serialization.ddl struct bucket_big { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/transform_ppr1.q.out
===================================================================
--- ql/src/test/results/clientpositive/transform_ppr1.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/transform_ppr1.q.out (working copy)
@@ -153,6 +153,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -171,6 +172,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -197,6 +199,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -215,6 +218,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -241,6 +245,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -259,6 +264,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -285,6 +291,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -303,6 +310,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/ppd_union_view.q.out
===================================================================
--- ql/src/test/results/clientpositive/ppd_union_view.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/ppd_union_view.q.out (working copy)
@@ -271,6 +271,7 @@
numFiles 1
numRows 1
partition_columns ds
+ partition_columns.types string
rawDataSize 12
serialization.ddl struct t1_mapping { string key, string keymap}
serialization.format 1
@@ -289,6 +290,7 @@
#### A masked pattern was here ####
name default.t1_mapping
partition_columns ds
+ partition_columns.types string
serialization.ddl struct t1_mapping { string key, string keymap}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -314,6 +316,7 @@
numFiles 1
numRows 1
partition_columns ds
+ partition_columns.types string
rawDataSize 14
serialization.ddl struct t1_old { string keymap, string value}
serialization.format 1
@@ -332,6 +335,7 @@
#### A masked pattern was here ####
name default.t1_old
partition_columns ds
+ partition_columns.types string
serialization.ddl struct t1_old { string keymap, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -685,6 +689,7 @@
numFiles 1
numRows 1
partition_columns ds
+ partition_columns.types string
rawDataSize 11
serialization.ddl struct t1_new { string key, string value}
serialization.format 1
@@ -703,6 +708,7 @@
#### A masked pattern was here ####
name default.t1_new
partition_columns ds
+ partition_columns.types string
serialization.ddl struct t1_new { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/stats11.q.out
===================================================================
--- ql/src/test/results/clientpositive/stats11.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/stats11.q.out (working copy)
@@ -442,6 +442,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
serialization.format 1
@@ -461,6 +462,7 @@
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part
partition_columns ds
+ partition_columns.types string
serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1043,6 +1045,7 @@
numFiles 4
numRows 0
partition_columns ds
+ partition_columns.types string
rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
serialization.format 1
@@ -1062,6 +1065,7 @@
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part
partition_columns ds
+ partition_columns.types string
serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/input23.q.out
===================================================================
--- ql/src/test/results/clientpositive/input23.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/input23.q.out (working copy)
@@ -96,6 +96,7 @@
numFiles 1
numRows 0
partition_columns ds/hr
+ partition_columns.types string:string
rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
@@ -114,6 +115,7 @@
#### A masked pattern was here ####
name default.srcpart
partition_columns ds/hr
+ partition_columns.types string:string
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Index: ql/src/test/results/clientpositive/filter_numeric.q.out
===================================================================
--- ql/src/test/results/clientpositive/filter_numeric.q.out (revision 1583107)
+++ ql/src/test/results/clientpositive/filter_numeric.q.out (working copy)
@@ -74,7 +74,7 @@
alias: partint
Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string), hr (type: string)
+ expressions: key (type: string), value (type: string), hr (type: int)
outputColumnNames: _col0, _col1, _col2
Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
File Output Operator
@@ -623,7 +623,7 @@
alias: partint
Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string), hr (type: string)
+ expressions: key (type: string), value (type: string), hr (type: int)
outputColumnNames: _col0, _col1, _col2
Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
File Output Operator
@@ -1674,7 +1674,7 @@
alias: partint
Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string), hr (type: string)
+ expressions: key (type: string), value (type: string), hr (type: int)
outputColumnNames: _col0, _col1, _col2
Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
File Output Operator
@@ -2223,7 +2223,7 @@
alias: partint
Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string), hr (type: string)
+ expressions: key (type: string), value (type: string), hr (type: int)
outputColumnNames: _col0, _col1, _col2
Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
File Output Operator
Index: ql/src/test/results/compiler/plan/input2.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/input2.q.xml (revision 1583107)
+++ ql/src/test/results/compiler/plan/input2.q.xml (working copy)
@@ -1166,6 +1107,10 @@
defaultdefault
+ partition_columns.types
+ string:string
+
+
partition_columns
ds/hr
@@ -1337,6 +1282,10 @@
key,value
+ partition_columns.types
+ string:string
+
+
columns.comments
defaultdefault
Index: ql/src/test/results/compiler/plan/input3.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/input3.q.xml (revision 1583107)
+++ ql/src/test/results/compiler/plan/input3.q.xml (working copy)
@@ -1225,6 +1225,10 @@
defaultdefault
+ partition_columns.types
+ string:string
+
+
partition_columns
ds/hr
@@ -1396,6 +1400,10 @@
key,value
+ partition_columns.types
+ string:string
+
+
columns.comments
defaultdefault
@@ -1563,6 +1571,10 @@
key,value
+ partition_columns.types
+ string:string
+
+
columns.comments
defaultdefault
Index: ql/src/test/results/compiler/plan/input_part1.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/input_part1.q.xml (revision 1583107)
+++ ql/src/test/results/compiler/plan/input_part1.q.xml (working copy)
@@ -47,6 +47,10 @@
key,value
+ partition_columns.types
+ string:string
+
+
COLUMN_STATS_ACCURATE
true
@@ -139,6 +143,10 @@
key,value
+ partition_columns.types
+ string:string
+
+
columns.comments
defaultdefault
@@ -818,6 +826,10 @@
key,value
+ partition_columns.types
+ string:string
+
+
COLUMN_STATS_ACCURATE
true
Index: ql/src/test/results/compiler/plan/sample1.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/sample1.q.xml (revision 1583107)
+++ ql/src/test/results/compiler/plan/sample1.q.xml (working copy)
@@ -47,6 +47,10 @@
key,value
+ partition_columns.types
+ string:string
+
+
COLUMN_STATS_ACCURATE
true
@@ -139,6 +143,10 @@
key,value
+ partition_columns.types
+ string:string
+
+
columns.comments
defaultdefault
@@ -907,6 +915,10 @@
key,value
+ partition_columns.types
+ string:string
+
+
COLUMN_STATS_ACCURATE
true
Index: ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizedRowBatchCtx.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizedRowBatchCtx.java (revision 1583107)
+++ ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizedRowBatchCtx.java (working copy)
@@ -202,7 +202,7 @@
Assert.assertEquals("Field size should be 9", colCount, fieldRefs.size());
// Create the context
- VectorizedRowBatchCtx ctx = new VectorizedRowBatchCtx(oi, oi, serDe, null);
+ VectorizedRowBatchCtx ctx = new VectorizedRowBatchCtx(oi, oi, serDe, null, null);
VectorizedRowBatch batch = ctx.createVectorizedRowBatch();
VectorizedBatchUtil.SetNoNullFields(true, batch);
Index: ql/src/test/queries/clientpositive/alter_partition_coltype.q
===================================================================
--- ql/src/test/queries/clientpositive/alter_partition_coltype.q (revision 1583107)
+++ ql/src/test/queries/clientpositive/alter_partition_coltype.q (working copy)
@@ -10,48 +10,48 @@
select count(*) from alter_coltype where dt = '100x';
-- alter partition key column data type for dt column.
-alter table alter_coltype partition column (dt int);
+-- alter table alter_coltype partition column (dt int);
-- load a new partition using new data type.
-insert overwrite table alter_coltype partition(dt=10, ts='3.0') select * from src1;
+-- insert overwrite table alter_coltype partition(dt=10, ts='3.0') select * from src1;
-- make sure the partition predicate still works.
-select count(*) from alter_coltype where dt = '100x';
-explain extended select count(*) from alter_coltype where dt = '100x';
+-- select count(*) from alter_coltype where dt = '100x';
+-- explain extended select count(*) from alter_coltype where dt = '100x';
-select count(*) from alter_coltype where dt = '100';
+-- select count(*) from alter_coltype where dt = '100';
-- alter partition key column data type for ts column.
-alter table alter_coltype partition column (ts double);
+-- alter table alter_coltype partition column (ts double);
-alter table alter_coltype partition column (dt string);
+-- alter table alter_coltype partition column (dt string);
-- load a new partition using new data type.
-insert overwrite table alter_coltype partition(dt='100x', ts=3.0) select * from src1;
+-- insert overwrite table alter_coltype partition(dt='100x', ts=3.0) select * from src1;
-- validate partition key column predicate can still work.
-select count(*) from alter_coltype where ts = '6:30pm';
-explain extended select count(*) from alter_coltype where ts = '6:30pm';
+-- select count(*) from alter_coltype where ts = '6:30pm';
+-- explain extended select count(*) from alter_coltype where ts = '6:30pm';
-- validate partition key column predicate on two different partition column data type
-- can still work.
-select count(*) from alter_coltype where ts = 3.0 and dt=10;
-explain extended select count(*) from alter_coltype where ts = 3.0 and dt=10;
+-- select count(*) from alter_coltype where ts = 3.0 and dt=10;
+-- explain extended select count(*) from alter_coltype where ts = 3.0 and dt=10;
-- query where multiple partition values (of different datatypes) are being selected
-select key, value, dt, ts from alter_coltype where dt is not null;
-explain extended select key, value, dt, ts from alter_coltype where dt is not null;
+-- select key, value, dt, ts from alter_coltype where dt is not null;
+-- explain extended select key, value, dt, ts from alter_coltype where dt is not null;
-select count(*) from alter_coltype where ts = 3.0;
+-- select count(*) from alter_coltype where ts = 3.0;
-- make sure the partition predicate still works.
-select count(*) from alter_coltype where dt = '100x' or dt = '10';
-explain extended select count(*) from alter_coltype where dt = '100x' or dt = '10';
+-- select count(*) from alter_coltype where dt = '100x' or dt = '10';
+-- explain extended select count(*) from alter_coltype where dt = '100x' or dt = '10';
-desc alter_coltype;
-desc alter_coltype partition (dt='100x', ts='6:30pm');
-desc alter_coltype partition (dt='100x', ts=3.0);
-desc alter_coltype partition (dt=10, ts=3.0);
+-- desc alter_coltype;
+-- desc alter_coltype partition (dt='100x', ts='6:30pm');
+-- desc alter_coltype partition (dt='100x', ts=3.0);
+-- desc alter_coltype partition (dt=10, ts=3.0);
drop table alter_coltype;
Index: ql/src/test/queries/clientpositive/pcr.q
===================================================================
--- ql/src/test/queries/clientpositive/pcr.q (revision 1583107)
+++ ql/src/test/queries/clientpositive/pcr.q (working copy)
@@ -112,7 +112,7 @@
insert overwrite table pcr_foo partition (ds=7) select * from src where key < 10 order by key;
-- the condition is 'true' for all the 3 partitions (ds=3,5,7):
-select key, value, ds from pcr_foo where (ds % 2 == 1);
+select key, value, ds from pcr_foo where (ds % 2.0 == 1);
-- the condition is 'true' for partitions (ds=3,5) but 'false' of partition ds=7:
select key, value, ds from pcr_foo where (ds / 3 < 2);
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (revision 1583107)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (working copy)
@@ -8589,10 +8589,8 @@
// Finally add the partitioning columns
for (FieldSchema part_col : tab.getPartCols()) {
LOG.trace("Adding partition col: " + part_col);
- // TODO: use the right type by calling part_col.getType() instead of
- // String.class. See HIVE-3059.
rwsch.put(alias, part_col.getName(), new ColumnInfo(part_col.getName(),
- TypeInfoFactory.stringTypeInfo, alias, true));
+ TypeInfoFactory.getPrimitiveTypeInfo(part_col.getType()), alias, true));
}
// put all virutal columns in RowResolver.
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java (revision 1583107)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java (working copy)
@@ -53,6 +53,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
@@ -179,7 +180,7 @@
PartitionDesc pd = ctx.partDesc;
TableDesc td = pd.getTableDesc();
-
+
MapOpCtx opCtx = new MapOpCtx();
// Use table properties in case of unpartitioned tables,
// and the union of table properties and partition properties, with partition
@@ -203,28 +204,43 @@
opCtx.partTblObjectInspectorConverter = ObjectInspectorConverters.getConverter(
partRawRowObjectInspector, opCtx.tblRawRowObjectInspector);
-
+
// Next check if this table has partitions and if so
// get the list of partition names as well as allocate
// the serdes for the partition columns
String pcols = partProps.getProperty(hive_metastoreConstants.META_TABLE_PARTITION_COLUMNS);
- // Log LOG = LogFactory.getLog(MapOperator.class.getName());
+
if (pcols != null && pcols.length() > 0) {
String[] partKeys = pcols.trim().split("/");
+ String pcolTypes = partProps.getProperty(hive_metastoreConstants.META_TABLE_PARTITION_COLUMN_TYPES);
+ String[] partKeyTypes = pcolTypes.trim().split(":");
+
+ if (partKeys.length > partKeyTypes.length) {
+ throw new HiveException("Internal error : partKeys length, " +partKeys.length +
+ " greater than partKeyTypes length, " + partKeyTypes.length);
+ }
+
List partNames = new ArrayList(partKeys.length);
Object[] partValues = new Object[partKeys.length];
List partObjectInspectors = new ArrayList(partKeys.length);
+
for (int i = 0; i < partKeys.length; i++) {
String key = partKeys[i];
partNames.add(key);
+ ObjectInspector oi = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector
+ (TypeInfoFactory.getPrimitiveTypeInfo(partKeyTypes[i]));
+
// Partitions do not exist for this table
if (partSpec == null) {
// for partitionless table, initialize partValue to null
partValues[i] = null;
} else {
- partValues[i] = new Text(partSpec.get(key));
+ partValues[i] =
+ ObjectInspectorConverters.
+ getConverter(PrimitiveObjectInspectorFactory.
+ javaStringObjectInspector, oi).convert(partSpec.get(key));
}
- partObjectInspectors.add(PrimitiveObjectInspectorFactory.writableStringObjectInspector);
+ partObjectInspectors.add(oi);
}
opCtx.rowWithPart = new Object[] {null, partValues};
opCtx.partObjectInspector = ObjectInspectorFactory
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java (revision 1583107)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java (working copy)
@@ -59,6 +59,11 @@
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveTypeEntry;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
+import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapred.InputFormat;
@@ -245,9 +250,12 @@
String pcols = partition.getTableDesc().getProperties().getProperty(
org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_PARTITION_COLUMNS);
String[] partKeys = pcols.trim().split("/");
- row[1] = createPartValue(partKeys, partition.getPartSpec());
+ String pcolTypes = partition.getTableDesc().getProperties().getProperty(
+ org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_PARTITION_COLUMN_TYPES);
+ String[] partKeyTypes = pcolTypes.trim().split(":");
+ row[1] = createPartValue(partKeys, partition.getPartSpec(), partKeyTypes);
- return createRowInspector(getStructOIFrom(partitionOI), partKeys);
+ return createRowInspector(getStructOIFrom(partitionOI), partKeys, partKeyTypes);
}
private StructObjectInspector getRowInspectorFromPartitionedTable(TableDesc table)
@@ -257,8 +265,11 @@
String pcols = table.getProperties().getProperty(
org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_PARTITION_COLUMNS);
String[] partKeys = pcols.trim().split("/");
+ String pcolTypes = table.getProperties().getProperty(
+ org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_PARTITION_COLUMN_TYPES);
+ String[] partKeyTypes = pcolTypes.trim().split(":");
row[1] = null;
- return createRowInspector(getStructOIFrom(serde.getObjectInspector()), partKeys);
+ return createRowInspector(getStructOIFrom(serde.getObjectInspector()), partKeys, partKeyTypes);
}
private StructObjectInspector getStructOIFrom(ObjectInspector current) throws SerDeException {
@@ -276,13 +287,16 @@
Arrays.asList(current, vcsOI)) : current;
}
- private StructObjectInspector createRowInspector(StructObjectInspector current, String[] partKeys)
+ private StructObjectInspector createRowInspector(StructObjectInspector current, String[] partKeys, String[] partKeyTypes)
throws SerDeException {
List partNames = new ArrayList();
List partObjectInspectors = new ArrayList();
- for (String key : partKeys) {
- partNames.add(key);
- partObjectInspectors.add(PrimitiveObjectInspectorFactory.javaStringObjectInspector);
+ for (int i = 0; i < partKeys.length; i++) {
+ String key = partKeys[i];
+ partNames.add(key);
+ ObjectInspector oi = TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(
+ TypeInfoFactory.getPrimitiveTypeInfo(partKeyTypes[i]));
+ partObjectInspectors.add(oi);
}
StructObjectInspector partObjectInspector = ObjectInspectorFactory
.getStandardStructObjectInspector(partNames, partObjectInspectors);
@@ -292,10 +306,16 @@
Arrays.asList(current, partObjectInspector));
}
- private List createPartValue(String[] partKeys, Map partSpec) {
- List partValues = new ArrayList();
- for (String key : partKeys) {
- partValues.add(partSpec.get(key));
+ private Object[] createPartValue(String[] partKeys, Map partSpec, String[] partKeyTypes) {
+ Object[] partValues = new Object[partKeys.length];
+ for (int i = 0; i < partKeys.length; i++) {
+ String key = partKeys[i];
+ ObjectInspector oi = TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(
+ TypeInfoFactory.getPrimitiveTypeInfo(partKeyTypes[i]));
+ partValues[i] =
+ ObjectInspectorConverters.
+ getConverter(PrimitiveObjectInspectorFactory.
+ javaStringObjectInspector, oi).convert(partSpec.get(key));
}
return partValues;
}
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorGroupByOperator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorGroupByOperator.java (revision 1583107)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorGroupByOperator.java (working copy)
@@ -260,7 +260,7 @@
HiveConf.ConfVars.HIVE_VECTORIZATION_GROUPBY_MAXENTRIES);
this.minReductionHashAggr = HiveConf.getFloatVar(hconf,
HiveConf.ConfVars.HIVEMAPAGGRHASHMINREDUCTION);
- this.numRowsCompareHashAggr = HiveConf.getLongVar(hconf,
+ this.numRowsCompareHashAggr = HiveConf.getIntVar(hconf,
HiveConf.ConfVars.HIVEGROUPBYMAPINTERVAL);
}
else {
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java (revision 1583107)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java (working copy)
@@ -18,6 +18,8 @@
package org.apache.hadoop.hive.ql.exec.vector;
import java.io.IOException;
+import java.sql.Date;
+import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashMap;
@@ -28,6 +30,8 @@
import java.util.regex.Pattern;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.common.type.Decimal128;
+import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils;
@@ -38,12 +42,17 @@
import org.apache.hadoop.hive.serde2.Deserializer;
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapred.FileSplit;
@@ -54,7 +63,7 @@
* with the partition column.
*/
public class VectorizedRowBatchCtx {
-
+
// OI for raw row data (EG without partition cols)
private StructObjectInspector rawRowOI;
@@ -65,8 +74,11 @@
private Deserializer deserializer;
// Hash map of partition values. Key=TblColName value=PartitionValue
- private Map partitionValues;
-
+ private Map partitionValues;
+
+ //partition types
+ private Map partitionTypes;
+
// Column projection list - List of column indexes to include. This
// list does not contain partition columns
private List colsToInclude;
@@ -86,11 +98,13 @@
* Hash map of partition values. Key=TblColName value=PartitionValue
*/
public VectorizedRowBatchCtx(StructObjectInspector rawRowOI, StructObjectInspector rowOI,
- Deserializer deserializer, Map partitionValues) {
+ Deserializer deserializer, Map partitionValues,
+ Map partitionTypes) {
this.rowOI = rowOI;
this.rawRowOI = rawRowOI;
this.deserializer = deserializer;
this.partitionValues = partitionValues;
+ this.partitionTypes = partitionTypes;
}
/**
@@ -173,25 +187,44 @@
// raw row object inspector (row with out partition col)
LinkedHashMap partSpec = part.getPartSpec();
String[] partKeys = pcols.trim().split("/");
+ String pcolTypes = partProps.getProperty(hive_metastoreConstants.META_TABLE_PARTITION_COLUMN_TYPES);
+ String[] partKeyTypes = pcolTypes.trim().split(":");
+
+ if (partKeys.length > partKeyTypes.length) {
+ throw new HiveException("Internal error : partKeys length, " +partKeys.length +
+ " greater than partKeyTypes length, " + partKeyTypes.length);
+ }
+
List partNames = new ArrayList(partKeys.length);
- partitionValues = new LinkedHashMap();
- List partObjectInspectors = new ArrayList(
- partKeys.length);
+ List partObjectInspectors = new ArrayList(partKeys.length);
+ partitionValues = new LinkedHashMap();
+ partitionTypes = new LinkedHashMap();
for (int i = 0; i < partKeys.length; i++) {
String key = partKeys[i];
partNames.add(key);
+ ObjectInspector objectInspector = null;
+ Object objectVal;
if (partSpec == null) {
// for partitionless table, initialize partValue to empty string.
// We can have partitionless table even if we have partition keys
// when there is only only partition selected and the partition key is not
// part of the projection/include list.
- partitionValues.put(key, "");
+ objectVal = null;
+ objectInspector = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
+ partitionTypes.put(key, PrimitiveCategory.STRING);
} else {
- partitionValues.put(key, partSpec.get(key));
+ // Create a Standard java object Inspector
+ objectInspector = TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(
+ TypeInfoFactory.getPrimitiveTypeInfo(partKeyTypes[i]));
+ objectVal =
+ ObjectInspectorConverters.
+ getConverter(PrimitiveObjectInspectorFactory.
+ javaStringObjectInspector, objectInspector).
+ convert(partSpec.get(key));
+ partitionTypes.put(key, TypeInfoFactory.getPrimitiveTypeInfo(partKeyTypes[i]).getPrimitiveCategory());
}
-
- partObjectInspectors
- .add(PrimitiveObjectInspectorFactory.writableStringObjectInspector);
+ partitionValues.put(key, objectVal);
+ partObjectInspectors.add(objectInspector);
}
// Create partition OI
@@ -213,7 +246,7 @@
colsToInclude = ColumnProjectionUtils.getReadColumnIDs(hiveConf);
}
-
+
/**
* Creates a Vectorized row batch and the column vectors.
*
@@ -274,8 +307,7 @@
+ foi.getCategory());
default:
throw new HiveException("Unknown ObjectInspector category!");
-
- }
+ }
}
}
result.numCols = fieldRefs.size();
@@ -334,7 +366,7 @@
}
throw new HiveException("Not able to find column name in row object inspector");
}
-
+
/**
* Add the partition values to the batch
*
@@ -344,17 +376,165 @@
public void addPartitionColsToBatch(VectorizedRowBatch batch) throws HiveException
{
int colIndex;
- String value;
- BytesColumnVector bcv;
+ Object value;
+ PrimitiveCategory pCategory;
if (partitionValues != null) {
for (String key : partitionValues.keySet()) {
colIndex = getColIndexBasedOnColName(key);
value = partitionValues.get(key);
- bcv = (BytesColumnVector) batch.cols[colIndex];
- bcv.setRef(0, value.getBytes(), 0, value.length());
- bcv.isRepeating = true;
- bcv.isNull[0] = false;
- bcv.noNulls = true;
+ pCategory = partitionTypes.get(key);
+
+ switch (pCategory) {
+ case BOOLEAN: {
+ LongColumnVector lcv = (LongColumnVector) batch.cols[colIndex];
+ if (value == null) {
+ lcv.noNulls = false;
+ lcv.isNull[0] = true;
+ lcv.isRepeating = true;
+ } else {
+ lcv.fill((Boolean)value == true ? 1 : 0);
+ lcv.isNull[0] = false;
+ }
+ }
+ break;
+
+ case BYTE: {
+ LongColumnVector lcv = (LongColumnVector) batch.cols[colIndex];
+ if (value == null) {
+ lcv.noNulls = false;
+ lcv.isNull[0] = true;
+ lcv.isRepeating = true;
+ } else {
+ lcv.fill((Byte)value);
+ lcv.isNull[0] = false;
+ }
+ }
+ break;
+
+ case SHORT: {
+ LongColumnVector lcv = (LongColumnVector) batch.cols[colIndex];
+ if (value == null) {
+ lcv.noNulls = false;
+ lcv.isNull[0] = true;
+ lcv.isRepeating = true;
+ } else {
+ lcv.fill((Short)value);
+ lcv.isNull[0] = false;
+ }
+ }
+ break;
+
+ case INT: {
+ LongColumnVector lcv = (LongColumnVector) batch.cols[colIndex];
+ if (value == null) {
+ lcv.noNulls = false;
+ lcv.isNull[0] = true;
+ lcv.isRepeating = true;
+ } else {
+ lcv.fill((Integer)value);
+ lcv.isNull[0] = false;
+ }
+ }
+ break;
+
+ case LONG: {
+ LongColumnVector lcv = (LongColumnVector) batch.cols[colIndex];
+ if (value == null) {
+ lcv.noNulls = false;
+ lcv.isNull[0] = true;
+ lcv.isRepeating = true;
+ } else {
+ lcv.fill((Long)value);
+ lcv.isNull[0] = false;
+ }
+ }
+ break;
+
+ case DATE: {
+ LongColumnVector lcv = (LongColumnVector) batch.cols[colIndex];
+ if (value == null) {
+ lcv.noNulls = false;
+ lcv.isNull[0] = true;
+ lcv.isRepeating = true;
+ } else {
+ lcv.fill(((Date)value).getTime());
+ lcv.isNull[0] = false;
+ }
+ }
+ break;
+
+ case TIMESTAMP: {
+ LongColumnVector lcv = (LongColumnVector) batch.cols[colIndex];
+ if (value == null) {
+ lcv.noNulls = false;
+ lcv.isNull[0] = true;
+ lcv.isRepeating = true;
+ } else {
+ lcv.fill((long)(((Timestamp) value).getTime()));
+ lcv.isNull[0] = false;
+ }
+ }
+ break;
+
+ case FLOAT: {
+ DoubleColumnVector dcv = (DoubleColumnVector) batch.cols[colIndex];
+ if (value == null) {
+ dcv.noNulls = false;
+ dcv.isNull[0] = true;
+ dcv.isRepeating = true;
+ } else {
+ dcv.fill((Float) value);
+ dcv.isNull[0] = false;
+ }
+ }
+ break;
+
+ case DOUBLE: {
+ DoubleColumnVector dcv = (DoubleColumnVector) batch.cols[colIndex];
+ if (value == null) {
+ dcv.noNulls = false;
+ dcv.isNull[0] = true;
+ dcv.isRepeating = true;
+ } else {
+ dcv.fill((Double) value);
+ dcv.isNull[0] = false;
+ }
+ }
+ break;
+
+ case DECIMAL: {
+ DecimalColumnVector dv = (DecimalColumnVector) batch.cols[colIndex];
+ if (value == null) {
+ dv.noNulls = false;
+ dv.isNull[0] = true;
+ dv.isRepeating = true;
+ } else {
+ HiveDecimal hd = (HiveDecimal)(value);
+ dv.vector[0] = new Decimal128(hd.toString(), (short)hd.scale());
+ dv.isRepeating = true;
+ dv.isNull[0] = false;
+ }
+ }
+ break;
+
+ case STRING: {
+ BytesColumnVector bcv = (BytesColumnVector) batch.cols[colIndex];
+ String sVal = (String)value;
+ if (sVal == null) {
+ bcv.noNulls = false;
+ bcv.isNull[0] = true;
+ bcv.isRepeating = true;
+ } else {
+ bcv.fill(sVal.getBytes());
+ bcv.isNull[0] = false;
+ }
+ }
+ break;
+
+ default:
+ throw new HiveException("Unable to recognize the partition type " + pCategory +
+ " for column " + key);
+ }
}
}
}