Index: serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorConverters.java =================================================================== --- serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorConverters.java (revision 1523736) +++ serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorConverters.java (working copy) @@ -180,34 +180,32 @@ return PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(primInputOI); case STRUCT: StructObjectInspector structOutputOI = (StructObjectInspector) outputOI; - if (structOutputOI.isSettable()) { - return outputOI; - } - else { - // create a standard settable struct object inspector - List<? extends StructField> listFields = structOutputOI.getAllStructFieldRefs(); - List<String> structFieldNames = new ArrayList<String>(listFields.size()); - List<ObjectInspector> structFieldObjectInspectors = new ArrayList<ObjectInspector>( - listFields.size()); + // create a standard settable struct object inspector + List<? extends StructField> listFields = structOutputOI.getAllStructFieldRefs(); + List<String> structFieldNames = new ArrayList<String>(listFields.size()); + List<ObjectInspector> structFieldObjectInspectors = new ArrayList<ObjectInspector>( + listFields.size()); - for (StructField listField : listFields) { - structFieldNames.add(listField.getFieldName()); - structFieldObjectInspectors.add(getConvertedOI(listField.getFieldObjectInspector(), - listField.getFieldObjectInspector(), false)); - } - - return ObjectInspectorFactory.getStandardStructObjectInspector( - structFieldNames, - structFieldObjectInspectors); + for (StructField listField : listFields) { + structFieldNames.add(listField.getFieldName()); + structFieldObjectInspectors.add(getConvertedOI(listField.getFieldObjectInspector(), + listField.getFieldObjectInspector(), false)); } + return ObjectInspectorFactory.getStandardStructObjectInspector( + structFieldNames, + structFieldObjectInspectors); case LIST: ListObjectInspector listOutputOI = (ListObjectInspector) outputOI; return 
ObjectInspectorFactory.getStandardListObjectInspector( - listOutputOI.getListElementObjectInspector()); + getConvertedOI(listOutputOI.getListElementObjectInspector(), + listOutputOI.getListElementObjectInspector(), false)); case MAP: MapObjectInspector mapOutputOI = (MapObjectInspector) outputOI; return ObjectInspectorFactory.getStandardMapObjectInspector( - mapOutputOI.getMapKeyObjectInspector(), mapOutputOI.getMapValueObjectInspector()); + getConvertedOI(mapOutputOI.getMapKeyObjectInspector(), + mapOutputOI.getMapKeyObjectInspector(), false), + getConvertedOI(mapOutputOI.getMapValueObjectInspector(), + mapOutputOI.getMapValueObjectInspector(), false)); default: throw new RuntimeException("Hive internal error: conversion of " + inputOI.getTypeName() + " to " + outputOI.getTypeName() Index: ql/src/test/results/clientpositive/partition_wise_fileformat17.q.out =================================================================== --- ql/src/test/results/clientpositive/partition_wise_fileformat17.q.out (revision 1523736) +++ ql/src/test/results/clientpositive/partition_wise_fileformat17.q.out (working copy) @@ -1,12 +1,12 @@ -PREHOOK: query: -- HIVE-5199 : CustomSerDe1 and CustomSerDe2 are used here. +PREHOOK: query: -- HIVE-5199, HIVE-5285 : CustomSerDe(1, 2, 3) are used here. -- The final results should be all NULL columns deserialized using --- CustomSerDe1 and CustomSerDe2 irrespective of the inserted values +-- CustomSerDe(1, 2, 3) irrespective of the inserted values DROP TABLE PW17 PREHOOK: type: DROPTABLE -POSTHOOK: query: -- HIVE-5199 : CustomSerDe1 and CustomSerDe2 are used here. +POSTHOOK: query: -- HIVE-5199, HIVE-5285 : CustomSerDe(1, 2, 3) are used here. 
-- The final results should be all NULL columns deserialized using --- CustomSerDe1 and CustomSerDe2 irrespective of the inserted values +-- CustomSerDe(1, 2, 3) irrespective of the inserted values DROP TABLE PW17 POSTHOOK: type: DROPTABLE @@ -39,13 +39,13 @@ POSTHOOK: type: ALTERTABLE_SERIALIZER POSTHOOK: Input: default@pw17 POSTHOOK: Output: default@pw17 -PREHOOK: query: -- Without the fix, will throw cast exception via FetchOperator +PREHOOK: query: -- Without the fix HIVE-5199, will throw cast exception via FetchOperator SELECT * FROM PW17 PREHOOK: type: QUERY PREHOOK: Input: default@pw17 PREHOOK: Input: default@pw17@year=1 #### A masked pattern was here #### -POSTHOOK: query: -- Without the fix, will throw cast exception via FetchOperator +POSTHOOK: query: -- Without the fix HIVE-5199, will throw cast exception via FetchOperator SELECT * FROM PW17 POSTHOOK: type: QUERY POSTHOOK: Input: default@pw17 @@ -72,14 +72,89 @@ POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/pw17.txt' INTO TABLE PW17_2 POSTHOOK: type: LOAD POSTHOOK: Output: default@pw17_2 -PREHOOK: query: -- Without the fix, will throw cast exception via MapOperator +PREHOOK: query: -- Without the fix HIVE-5199, will throw cast exception via MapOperator SELECT COUNT(*) FROM PW17_2 PREHOOK: type: QUERY PREHOOK: Input: default@pw17_2 #### A masked pattern was here #### -POSTHOOK: query: -- Without the fix, will throw cast exception via MapOperator +POSTHOOK: query: -- Without the fix HIVE-5199, will throw cast exception via MapOperator SELECT COUNT(*) FROM PW17_2 POSTHOOK: type: QUERY POSTHOOK: Input: default@pw17_2 #### A masked pattern was here #### 4 +PREHOOK: query: DROP TABLE PW17_3 +PREHOOK: type: DROPTABLE +POSTHOOK: query: DROP TABLE PW17_3 +POSTHOOK: type: DROPTABLE +PREHOOK: query: CREATE TABLE PW17_3(USER STRING, COMPLEXDT ARRAY<ARRAY<INT>>) PARTITIONED BY (YEAR STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe3' +PREHOOK: type: CREATETABLE +POSTHOOK: query: CREATE TABLE 
PW17_3(USER STRING, COMPLEXDT ARRAY<ARRAY<INT>>) PARTITIONED BY (YEAR STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe3' +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: default@PW17_3 +PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/pw17.txt' INTO TABLE PW17_3 PARTITION (YEAR='1') +PREHOOK: type: LOAD +PREHOOK: Output: default@pw17_3 +POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/pw17.txt' INTO TABLE PW17_3 PARTITION (YEAR='1') +POSTHOOK: type: LOAD +POSTHOOK: Output: default@pw17_3 +POSTHOOK: Output: default@pw17_3@year=1 +PREHOOK: query: ALTER TABLE PW17_3 PARTITION(YEAR='1') SET SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe2' +PREHOOK: type: ALTERPARTITION_SERIALIZER +PREHOOK: Input: default@pw17_3 +PREHOOK: Output: default@pw17_3@year=1 +POSTHOOK: query: ALTER TABLE PW17_3 PARTITION(YEAR='1') SET SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe2' +POSTHOOK: type: ALTERPARTITION_SERIALIZER +POSTHOOK: Input: default@pw17_3 +POSTHOOK: Input: default@pw17_3@year=1 +POSTHOOK: Output: default@pw17_3@year=1 +PREHOOK: query: ALTER TABLE PW17_3 SET SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe3' +PREHOOK: type: ALTERTABLE_SERIALIZER +PREHOOK: Input: default@pw17_3 +PREHOOK: Output: default@pw17_3 +POSTHOOK: query: ALTER TABLE PW17_3 SET SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe3' +POSTHOOK: type: ALTERTABLE_SERIALIZER +POSTHOOK: Input: default@pw17_3 +POSTHOOK: Output: default@pw17_3 +PREHOOK: query: -- Without the fix HIVE-5285, will throw cast exception via FetchOperator +SELECT * FROM PW17_3 +PREHOOK: type: QUERY +PREHOOK: Input: default@pw17_3 +PREHOOK: Input: default@pw17_3@year=1 +#### A masked pattern was here #### +POSTHOOK: query: -- Without the fix HIVE-5285, will throw cast exception via FetchOperator +SELECT * FROM PW17_3 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@pw17_3 +POSTHOOK: Input: default@pw17_3@year=1 +#### A masked pattern was here #### +NULL NULL 1 +NULL NULL 1 +NULL NULL 1 +NULL NULL 1 +PREHOOK: query: DROP TABLE 
PW17_4 +PREHOOK: type: DROPTABLE +POSTHOOK: query: DROP TABLE PW17_4 +POSTHOOK: type: DROPTABLE +PREHOOK: query: CREATE TABLE PW17_4(USER STRING, COMPLEXDT ARRAY<ARRAY<INT>>) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe3' +PREHOOK: type: CREATETABLE +POSTHOOK: query: CREATE TABLE PW17_4(USER STRING, COMPLEXDT ARRAY<ARRAY<INT>>) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe3' +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: default@PW17_4 +PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/pw17.txt' INTO TABLE PW17_4 +PREHOOK: type: LOAD +PREHOOK: Output: default@pw17_4 +POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/pw17.txt' INTO TABLE PW17_4 +POSTHOOK: type: LOAD +POSTHOOK: Output: default@pw17_4 +PREHOOK: query: -- Without the fix HIVE-5285, will throw cast exception via MapOperator +SELECT COUNT(*) FROM PW17_4 +PREHOOK: type: QUERY +PREHOOK: Input: default@pw17_4 +#### A masked pattern was here #### +POSTHOOK: query: -- Without the fix HIVE-5285, will throw cast exception via MapOperator +SELECT COUNT(*) FROM PW17_4 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@pw17_4 +#### A masked pattern was here #### +4 Index: ql/src/test/org/apache/hadoop/hive/serde2/CustomSerDe3.java =================================================================== --- ql/src/test/org/apache/hadoop/hive/serde2/CustomSerDe3.java (revision 0) +++ ql/src/test/org/apache/hadoop/hive/serde2/CustomSerDe3.java (revision 0) @@ -0,0 +1,59 @@ +package org.apache.hadoop.hive.serde2; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Properties; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.serde.serdeConstants; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; +import 
org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; +import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; +import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils; + +public class CustomSerDe3 extends CustomSerDe1 { + @Override + public void initialize(Configuration conf, Properties tbl) + throws SerDeException { + + // Read the configuration parameters + String columnNameProperty = tbl.getProperty(serdeConstants.LIST_COLUMNS); + String columnTypeProperty = tbl.getProperty(serdeConstants.LIST_COLUMN_TYPES); + + // The input column can either be a string or a list of list of integer values. + List<String> columnNames = Arrays.asList(columnNameProperty.split(",")); + List<TypeInfo> columnTypes = TypeInfoUtils + .getTypeInfosFromTypeString(columnTypeProperty); + assert columnNames.size() == columnTypes.size(); + numColumns = columnNames.size(); + + // No exception for type checking for simplicity + // Constructing the row ObjectInspector: + // The row consists of some string columns, some Array<Array<int>> columns. + List<ObjectInspector> columnOIs = new ArrayList<ObjectInspector>( + columnNames.size()); + for (int c = 0; c < numColumns; c++) { + if (columnTypes.get(c).equals(TypeInfoFactory.stringTypeInfo)) { + columnOIs.add(PrimitiveObjectInspectorFactory.javaStringObjectInspector); + } else { + // Blindly add this as a non settable list of list of integers, + // should be sufficient for the test case. + // Use the standard list object inspector. + columnOIs.add(ObjectInspectorFactory.getStandardListObjectInspector( + new CustomNonSettableListObjectInspector1(PrimitiveObjectInspectorFactory.javaIntObjectInspector))); + } + } + // Use non-settable struct object inspector. + rowOI = new CustomNonSettableStructObjectInspector1( + columnNames, columnOIs); + + // Constructing the row object, etc, which will be reused for all rows. 
+ row = new ArrayList<Object>(numColumns); + for (int c = 0; c < numColumns; c++) { + row.add(null); + } + } +} Index: ql/src/test/queries/clientpositive/partition_wise_fileformat17.q =================================================================== --- ql/src/test/queries/clientpositive/partition_wise_fileformat17.q (revision 1523736) +++ ql/src/test/queries/clientpositive/partition_wise_fileformat17.q (working copy) @@ -1,6 +1,6 @@ --- HIVE-5199 : CustomSerDe1 and CustomSerDe2 are used here. +-- HIVE-5199, HIVE-5285 : CustomSerDe(1, 2, 3) are used here. -- The final results should be all NULL columns deserialized using --- CustomSerDe1 and CustomSerDe2 irrespective of the inserted values +-- CustomSerDe(1, 2, 3) irrespective of the inserted values DROP TABLE PW17; ADD JAR ../build/ql/test/test-serdes.jar; @@ -8,12 +8,27 @@ LOAD DATA LOCAL INPATH '../data/files/pw17.txt' INTO TABLE PW17 PARTITION (YEAR='1'); ALTER TABLE PW17 PARTITION(YEAR='1') SET SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe2'; ALTER TABLE PW17 SET SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe1'; --- Without the fix, will throw cast exception via FetchOperator +-- Without the fix HIVE-5199, will throw cast exception via FetchOperator SELECT * FROM PW17; -- Test for non-parititioned table. 
DROP TABLE PW17_2; CREATE TABLE PW17_2(USER STRING, COMPLEXDT ARRAY<INT>) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe1'; LOAD DATA LOCAL INPATH '../data/files/pw17.txt' INTO TABLE PW17_2; --- Without the fix, will throw cast exception via MapOperator -SELECT COUNT(*) FROM PW17_2; \ No newline at end of file +-- Without the fix HIVE-5199, will throw cast exception via MapOperator +SELECT COUNT(*) FROM PW17_2; + +DROP TABLE PW17_3; +CREATE TABLE PW17_3(USER STRING, COMPLEXDT ARRAY<ARRAY<INT>>) PARTITIONED BY (YEAR STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe3'; +LOAD DATA LOCAL INPATH '../data/files/pw17.txt' INTO TABLE PW17_3 PARTITION (YEAR='1'); +ALTER TABLE PW17_3 PARTITION(YEAR='1') SET SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe2'; +ALTER TABLE PW17_3 SET SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe3'; +-- Without the fix HIVE-5285, will throw cast exception via FetchOperator +SELECT * FROM PW17_3; + +DROP TABLE PW17_4; +CREATE TABLE PW17_4(USER STRING, COMPLEXDT ARRAY<ARRAY<INT>>) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe3'; +LOAD DATA LOCAL INPATH '../data/files/pw17.txt' INTO TABLE PW17_4; +-- Without the fix HIVE-5285, will throw cast exception via MapOperator +SELECT COUNT(*) FROM PW17_4; +