From ae330642fc8f119c7384ebe54f9abf5ecca06b8d Mon Sep 17 00:00:00 2001
From: System Administrator
Date: Tue, 17 Sep 2013 18:43:28 -0700
Subject: [PATCH] HIVE-5199: 0.12 branch

Make ObjectInspectorConverters.getConvertedOI return a settable object
inspector for every output category, and add an equalsCheck flag so that
recursive calls can force conversion of nested fields. Adds the
CustomSerDe1/CustomSerDe2 test SerDes with non-settable object inspectors
and the partition_wise_fileformat17 test.
---
 build-common.xml                                   |   4 +
 data/files/pw17.txt                                |   4 +
 .../apache/hadoop/hive/ql/exec/FetchOperator.java  |   4 +-
 .../apache/hadoop/hive/ql/exec/MapOperator.java    |   4 +-
 .../CustomNonSettableListObjectInspector1.java     |  64 ++++++++++++
 .../CustomNonSettableStructObjectInspector1.java   | 112 +++++++++++++++++++++
 .../apache/hadoop/hive/serde2/CustomSerDe1.java    |  95 +++++++++++++++++
 .../apache/hadoop/hive/serde2/CustomSerDe2.java    |  95 +++++++++++++++++
 .../clientpositive/partition_wise_fileformat17.q   |  19 ++++
 .../partition_wise_fileformat17.q.out              |  85 ++++++++++++++++
 .../objectinspector/ObjectInspectorConverters.java |  53 ++++------
 11 files changed, 503 insertions(+), 36 deletions(-)
 create mode 100644 data/files/pw17.txt
 create mode 100644 ql/src/test/org/apache/hadoop/hive/serde2/CustomNonSettableListObjectInspector1.java
 create mode 100644 ql/src/test/org/apache/hadoop/hive/serde2/CustomNonSettableStructObjectInspector1.java
 create mode 100644 ql/src/test/org/apache/hadoop/hive/serde2/CustomSerDe1.java
 create mode 100644 ql/src/test/org/apache/hadoop/hive/serde2/CustomSerDe2.java
 create mode 100644 ql/src/test/queries/clientpositive/partition_wise_fileformat17.q
 create mode 100644 ql/src/test/results/clientpositive/partition_wise_fileformat17.q.out
 mode change 100644 => 100755 serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorConverters.java

diff --git a/build-common.xml b/build-common.xml
index ad5ac23..e3c5bb0 100644
--- a/build-common.xml
+++ b/build-common.xml
@@ -331,6 +331,10 @@
+
+
+
+
diff --git a/data/files/pw17.txt b/data/files/pw17.txt
new file mode 100644
index 0000000..041944a
--- /dev/null
+++ b/data/files/pw17.txt
@@ -0,0 +1,4 @@
+1 12 1
+2 23 2
+4 56 3
+6 8 4
\ No newline at end of file
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java
index dc5ebb7..d2265e2 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java
@@ -402,7 +402,7 @@ private void getNextPath() throws Exception {
       ObjectInspector outputOI = ObjectInspectorConverters.getConvertedOI(
           serde.getObjectInspector(),
-          partitionedTableOI == null ? tblSerde.getObjectInspector() : partitionedTableOI);
+          partitionedTableOI == null ? tblSerde.getObjectInspector() : partitionedTableOI, true);
       partTblObjectInspectorConverter = ObjectInspectorConverters.getConverter(
           serde.getObjectInspector(), outputOI);
@@ -628,7 +628,7 @@ public ObjectInspector getOutputObjectInspector() throws HiveException {
         partSerde.initialize(job, listPart.getOverlayedProperties());
 
         partitionedTableOI = ObjectInspectorConverters.getConvertedOI(
-            partSerde.getObjectInspector(), tableOI);
+            partSerde.getObjectInspector(), tableOI, true);
         if (!partitionedTableOI.equals(tableOI)) {
           break;
         }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java
index 8122afa..529b125 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java
@@ -32,8 +32,8 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.exec.mr.ExecMapperContext;
 import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
+import org.apache.hadoop.hive.ql.exec.mr.ExecMapperContext;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.VirtualColumn;
 import org.apache.hadoop.hive.ql.plan.MapWork;
@@ -310,7 +310,7 @@ private MapOpCtx initObjectInspector(Configuration hconf, MapInputPath ctx,
       tblRawRowObjectInspector =
           (StructObjectInspector) ObjectInspectorConverters.getConvertedOI(
               partRawRowObjectInspector,
-              tblDeserializer.getObjectInspector());
+              tblDeserializer.getObjectInspector(), true);
 
       if (identityConverterTableDesc.contains(tableDesc)) {
         if (!partRawRowObjectInspector.equals(tblRawRowObjectInspector)) {
diff --git a/ql/src/test/org/apache/hadoop/hive/serde2/CustomNonSettableListObjectInspector1.java b/ql/src/test/org/apache/hadoop/hive/serde2/CustomNonSettableListObjectInspector1.java
new file mode 100644
index 0000000..867e030
--- /dev/null
+++ b/ql/src/test/org/apache/hadoop/hive/serde2/CustomNonSettableListObjectInspector1.java
@@ -0,0 +1,64 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.serde2;
+
+import java.util.List;
+
+import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+
+public class CustomNonSettableListObjectInspector1 implements ListObjectInspector {
+
+  ObjectInspector listElementObjectInspector;
+
+  protected CustomNonSettableListObjectInspector1(
+      ObjectInspector listElementObjectInspector) {
+    this.listElementObjectInspector = listElementObjectInspector;
+  }
+
+  public final Category getCategory() {
+    return Category.LIST;
+  }
+
+  // Without data
+  public ObjectInspector getListElementObjectInspector() {
+    return listElementObjectInspector;
+  }
+
+  // Not supported for the test case
+  public Object getListElement(Object data, int index) {
+    return null;
+  }
+
+  // Not supported for the test case
+  public int getListLength(Object data) {
+    return 0;
+  }
+
+  // Not supported for the test case
+  public List<?> getList(Object data) {
+    return null;
+  }
+
+  public String getTypeName() {
+    return org.apache.hadoop.hive.serde.serdeConstants.LIST_TYPE_NAME + "<"
+        + listElementObjectInspector.getTypeName() + ">";
+  }
+}
diff --git a/ql/src/test/org/apache/hadoop/hive/serde2/CustomNonSettableStructObjectInspector1.java b/ql/src/test/org/apache/hadoop/hive/serde2/CustomNonSettableStructObjectInspector1.java
new file mode 100644
index 0000000..52c82a5
--- /dev/null
+++ b/ql/src/test/org/apache/hadoop/hive/serde2/CustomNonSettableStructObjectInspector1.java
@@ -0,0 +1,112 @@
+package org.apache.hadoop.hive.serde2;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.objectinspector.StructField;
+import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+
+public class CustomNonSettableStructObjectInspector1 extends
+    StructObjectInspector {
+  public static final Log LOG = LogFactory
+      .getLog(CustomNonSettableStructObjectInspector1.class.getName());
+
+  protected static class MyField implements StructField {
+    protected int fieldID;
+    protected String fieldName;
+    protected ObjectInspector fieldObjectInspector;
+    protected String fieldComment;
+
+    public MyField(int fieldID, String fieldName,
+        ObjectInspector fieldObjectInspector) {
+      this.fieldID = fieldID;
+      this.fieldName = fieldName.toLowerCase();
+      this.fieldObjectInspector = fieldObjectInspector;
+    }
+
+    public MyField(int fieldID, String fieldName,
+        ObjectInspector fieldObjectInspector, String fieldComment) {
+      this(fieldID, fieldName, fieldObjectInspector);
+      this.fieldComment = fieldComment;
+    }
+
+    public int getFieldID() {
+      return fieldID;
+    }
+
+    public String getFieldName() {
+      return fieldName;
+    }
+
+    public ObjectInspector getFieldObjectInspector() {
+      return fieldObjectInspector;
+    }
+
+    public String getFieldComment() {
+      return fieldComment;
+    }
+
+    @Override
+    public String toString() {
+      return "" + fieldID + ":" + fieldName;
+    }
+  }
+
+  protected List<MyField> fields;
+
+  public String getTypeName() {
+    return ObjectInspectorUtils.getStandardStructTypeName(this);
+  }
+
+  /**
+   * Call ObjectInspectorFactory.getNonSettableStructObjectInspector instead.
+   */
+  protected CustomNonSettableStructObjectInspector1(List<String> structFieldNames,
+      List<ObjectInspector> structFieldObjectInspectors) {
+    init(structFieldNames, structFieldObjectInspectors);
+  }
+
+  protected void init(List<String> structFieldNames,
+      List<ObjectInspector> structFieldObjectInspectors) {
+    assert (structFieldNames.size() == structFieldObjectInspectors.size());
+
+    fields = new ArrayList<MyField>(structFieldNames.size());
+    for (int i = 0; i < structFieldNames.size(); i++) {
+      fields.add(new MyField(i, structFieldNames.get(i),
+          structFieldObjectInspectors.get(i), null));
+    }
+  }
+
+  public final Category getCategory() {
+    return Category.STRUCT;
+  }
+
+  // Without data
+  @Override
+  public StructField getStructFieldRef(String fieldName) {
+    return ObjectInspectorUtils.getStandardStructFieldRef(fieldName, fields);
+  }
+
+  @Override
+  public List<? extends StructField> getAllStructFieldRefs() {
+    return fields;
+  }
+
+  // With data - unsupported for the test case
+  @Override
+  public Object getStructFieldData(Object data, StructField fieldRef) {
+    return null;
+  }
+
+  // Unsupported for the test case
+  @Override
+  public List<Object> getStructFieldsDataAsList(Object data) {
+    return null;
+  }
+}
diff --git a/ql/src/test/org/apache/hadoop/hive/serde2/CustomSerDe1.java b/ql/src/test/org/apache/hadoop/hive/serde2/CustomSerDe1.java
new file mode 100644
index 0000000..42c1056
--- /dev/null
+++ b/ql/src/test/org/apache/hadoop/hive/serde2/CustomSerDe1.java
@@ -0,0 +1,95 @@
+package org.apache.hadoop.hive.serde2;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Properties;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.serde.serdeConstants;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.Writable;
+
+public class CustomSerDe1 extends AbstractSerDe {
+
+  int numColumns;
+
+  StructObjectInspector rowOI;
+  ArrayList<Object> row;
+
+  @Override
+  public void initialize(Configuration conf, Properties tbl)
+      throws SerDeException {
+
+    // Read the configuration parameters
+    String columnNameProperty = tbl.getProperty(serdeConstants.LIST_COLUMNS);
+    String columnTypeProperty = tbl.getProperty(serdeConstants.LIST_COLUMN_TYPES);
+
+    // Each input column is either a string or a list of integer values.
+    List<String> columnNames = Arrays.asList(columnNameProperty.split(","));
+    List<TypeInfo> columnTypes = TypeInfoUtils
+        .getTypeInfosFromTypeString(columnTypeProperty);
+    assert columnNames.size() == columnTypes.size();
+    numColumns = columnNames.size();
+
+    // No exception for type checking, for simplicity.
+    // Constructing the row ObjectInspector:
+    // the row consists of string columns and array<int> columns.
+    List<ObjectInspector> columnOIs = new ArrayList<ObjectInspector>(
+        columnNames.size());
+    for (int c = 0; c < numColumns; c++) {
+      if (columnTypes.get(c).equals(TypeInfoFactory.stringTypeInfo)) {
+        columnOIs.add(PrimitiveObjectInspectorFactory.javaStringObjectInspector);
+      } else {
+        // Blindly add this as an integer list; sufficient for the test case.
+        // Use the non-settable list object inspector.
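+        // Unlike the standard list inspector, this one is not settable, which
+        // forces getConvertedOI to build a standard settable OI for conversion.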
+        columnOIs.add(new CustomNonSettableListObjectInspector1(
+            PrimitiveObjectInspectorFactory.javaIntObjectInspector));
+      }
+    }
+    // Use the non-settable struct object inspector.
+    rowOI = new CustomNonSettableStructObjectInspector1(
+        columnNames, columnOIs);
+
+    // Construct the row object, which will be reused for all rows.
+    row = new ArrayList<Object>(numColumns);
+    for (int c = 0; c < numColumns; c++) {
+      row.add(null);
+    }
+  }
+
+  @Override
+  public ObjectInspector getObjectInspector() throws SerDeException {
+    return rowOI;
+  }
+
+  @Override
+  public Class<? extends Writable> getSerializedClass() {
+    return Text.class;
+  }
+
+  @Override
+  public Object deserialize(Writable blob) throws SerDeException {
+    // All column values always deserialize to NULL.
+    return row;
+  }
+
+  @Override
+  public Writable serialize(Object obj, ObjectInspector objInspector)
+      throws SerDeException {
+    return null;
+  }
+
+  @Override
+  public SerDeStats getSerDeStats() {
+    // No support for statistics.
+    return null;
+  }
+
+}
diff --git a/ql/src/test/org/apache/hadoop/hive/serde2/CustomSerDe2.java b/ql/src/test/org/apache/hadoop/hive/serde2/CustomSerDe2.java
new file mode 100644
index 0000000..d801fca
--- /dev/null
+++ b/ql/src/test/org/apache/hadoop/hive/serde2/CustomSerDe2.java
@@ -0,0 +1,95 @@
+package org.apache.hadoop.hive.serde2;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Properties;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.serde.serdeConstants;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.Writable;
+
+public class CustomSerDe2 extends AbstractSerDe {
+
+  int numColumns;
+
+  StructObjectInspector rowOI;
+  ArrayList<Object> row;
+
+  @Override
+  public void initialize(Configuration conf, Properties tbl)
+      throws SerDeException {
+
+    // Read the configuration parameters
+    String columnNameProperty = tbl.getProperty(serdeConstants.LIST_COLUMNS);
+    String columnTypeProperty = tbl.getProperty(serdeConstants.LIST_COLUMN_TYPES);
+
+    // Each input column is either a string or a list of integer values.
+    List<String> columnNames = Arrays.asList(columnNameProperty.split(","));
+    List<TypeInfo> columnTypes = TypeInfoUtils
+        .getTypeInfosFromTypeString(columnTypeProperty);
+    assert columnNames.size() == columnTypes.size();
+    numColumns = columnNames.size();
+
+    // No exception for type checking, for simplicity.
+    // Constructing the row ObjectInspector:
+    // the row consists of string columns and array<int> columns.
+    List<ObjectInspector> columnOIs = new ArrayList<ObjectInspector>(
+        columnNames.size());
+    for (int c = 0; c < numColumns; c++) {
+      if (columnTypes.get(c).equals(TypeInfoFactory.stringTypeInfo)) {
+        columnOIs.add(PrimitiveObjectInspectorFactory.javaStringObjectInspector);
+      } else {
+        // Blindly add this as an integer list; sufficient for the test case.
+        columnOIs.add(ObjectInspectorFactory.getStandardListObjectInspector(
+            PrimitiveObjectInspectorFactory.javaIntObjectInspector));
+      }
+    }
+    // StandardStruct uses ArrayList to store the row.
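+    // In contrast to CustomSerDe1, this serde exposes standard, settable
+    // object inspectors for both the struct and its list fields.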
+    rowOI = ObjectInspectorFactory.getStandardStructObjectInspector(
+        columnNames, columnOIs);
+
+    // Construct the row object, which will be reused for all rows.
+    row = new ArrayList<Object>(numColumns);
+    for (int c = 0; c < numColumns; c++) {
+      row.add(null);
+    }
+  }
+
+  @Override
+  public ObjectInspector getObjectInspector() throws SerDeException {
+    return rowOI;
+  }
+
+  @Override
+  public Class<? extends Writable> getSerializedClass() {
+    return Text.class;
+  }
+
+  @Override
+  public Object deserialize(Writable blob) throws SerDeException {
+    // All column values always deserialize to NULL.
+    return row;
+  }
+
+  @Override
+  public Writable serialize(Object obj, ObjectInspector objInspector)
+      throws SerDeException {
+    return null;
+  }
+
+  @Override
+  public SerDeStats getSerDeStats() {
+    // No support for statistics.
+    return null;
+  }
+
+}
diff --git a/ql/src/test/queries/clientpositive/partition_wise_fileformat17.q b/ql/src/test/queries/clientpositive/partition_wise_fileformat17.q
new file mode 100644
index 0000000..f07810d
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/partition_wise_fileformat17.q
@@ -0,0 +1,19 @@
+-- HIVE-5199 : CustomSerDe1 and CustomSerDe2 are used here.
+-- The final results should be all NULL columns deserialized using
+-- CustomSerDe1 and CustomSerDe2 irrespective of the inserted values
+
+DROP TABLE PW17;
+ADD JAR ../build/ql/test/test-serdes.jar;
+CREATE TABLE PW17(USER STRING, COMPLEXDT ARRAY<INT>) PARTITIONED BY (YEAR STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe1';
+LOAD DATA LOCAL INPATH '../data/files/pw17.txt' INTO TABLE PW17 PARTITION (YEAR='1');
+ALTER TABLE PW17 PARTITION(YEAR='1') SET SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe2';
+ALTER TABLE PW17 SET SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe1';
+-- Without the fix, this will throw a cast exception via FetchOperator
+SELECT * FROM PW17;
+
+-- Test for non-partitioned table.
+DROP TABLE PW17_2;
+CREATE TABLE PW17_2(USER STRING, COMPLEXDT ARRAY<INT>) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe1';
+LOAD DATA LOCAL INPATH '../data/files/pw17.txt' INTO TABLE PW17_2;
+-- Without the fix, this will throw a cast exception via MapOperator
+SELECT COUNT(*) FROM PW17_2;
\ No newline at end of file
diff --git a/ql/src/test/results/clientpositive/partition_wise_fileformat17.q.out b/ql/src/test/results/clientpositive/partition_wise_fileformat17.q.out
new file mode 100644
index 0000000..84dbd98
--- /dev/null
+++ b/ql/src/test/results/clientpositive/partition_wise_fileformat17.q.out
@@ -0,0 +1,85 @@
+PREHOOK: query: -- HIVE-5199 : CustomSerDe1 and CustomSerDe2 are used here.
+-- The final results should be all NULL columns deserialized using
+-- CustomSerDe1 and CustomSerDe2 irrespective of the inserted values
+
+DROP TABLE PW17
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: -- HIVE-5199 : CustomSerDe1 and CustomSerDe2 are used here.
+-- The final results should be all NULL columns deserialized using
+-- CustomSerDe1 and CustomSerDe2 irrespective of the inserted values
+
+DROP TABLE PW17
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE PW17(USER STRING, COMPLEXDT ARRAY<INT>) PARTITIONED BY (YEAR STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe1'
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE PW17(USER STRING, COMPLEXDT ARRAY<INT>) PARTITIONED BY (YEAR STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe1'
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@PW17
+PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/pw17.txt' INTO TABLE PW17 PARTITION (YEAR='1')
+PREHOOK: type: LOAD
+PREHOOK: Output: default@pw17
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/pw17.txt' INTO TABLE PW17 PARTITION (YEAR='1')
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@pw17
+POSTHOOK: Output: default@pw17@year=1
+PREHOOK: query: ALTER TABLE PW17 PARTITION(YEAR='1') SET SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe2'
+PREHOOK: type: ALTERPARTITION_SERIALIZER
+PREHOOK: Input: default@pw17
+PREHOOK: Output: default@pw17@year=1
+POSTHOOK: query: ALTER TABLE PW17 PARTITION(YEAR='1') SET SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe2'
+POSTHOOK: type: ALTERPARTITION_SERIALIZER
+POSTHOOK: Input: default@pw17
+POSTHOOK: Input: default@pw17@year=1
+POSTHOOK: Output: default@pw17@year=1
+PREHOOK: query: ALTER TABLE PW17 SET SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe1'
+PREHOOK: type: ALTERTABLE_SERIALIZER
+PREHOOK: Input: default@pw17
+PREHOOK: Output: default@pw17
+POSTHOOK: query: ALTER TABLE PW17 SET SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe1'
+POSTHOOK: type: ALTERTABLE_SERIALIZER
+POSTHOOK: Input: default@pw17
+POSTHOOK: Output: default@pw17
+PREHOOK: query: -- Without the fix, this will throw a cast exception via FetchOperator
+SELECT * FROM PW17
+PREHOOK: type: QUERY
+PREHOOK: Input: default@pw17
+PREHOOK: Input: default@pw17@year=1
+#### A masked pattern was here ####
+POSTHOOK: query: -- Without the fix, this will throw a cast exception via FetchOperator
+SELECT * FROM PW17
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@pw17
+POSTHOOK: Input: default@pw17@year=1
+#### A masked pattern was here ####
+NULL	NULL	1
+NULL	NULL	1
+NULL	NULL	1
+NULL	NULL	1
+PREHOOK: query: -- Test for non-partitioned table.
+DROP TABLE PW17_2
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: -- Test for non-partitioned table.
+DROP TABLE PW17_2
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE PW17_2(USER STRING, COMPLEXDT ARRAY<INT>) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe1'
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE PW17_2(USER STRING, COMPLEXDT ARRAY<INT>) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe1'
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@PW17_2
+PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/pw17.txt' INTO TABLE PW17_2
+PREHOOK: type: LOAD
+PREHOOK: Output: default@pw17_2
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/pw17.txt' INTO TABLE PW17_2
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@pw17_2
+PREHOOK: query: -- Without the fix, this will throw a cast exception via MapOperator
+SELECT COUNT(*) FROM PW17_2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@pw17_2
+#### A masked pattern was here ####
+POSTHOOK: query: -- Without the fix, this will throw a cast exception via MapOperator
+SELECT COUNT(*) FROM PW17_2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@pw17_2
+#### A masked pattern was here ####
+4
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorConverters.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorConverters.java
old mode 100644
new mode 100755
index 8f700b3a..485b3d1
--- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorConverters.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorConverters.java
@@ -142,6 +142,7 @@ public static Converter getConverter(ObjectInspector inputOI,
     if (inputOI.equals(outputOI)) {
       return new IdentityConverter();
     }
+    // TODO: Add support for UNION once SettableUnionObjectInspector is implemented.
     switch (outputOI.getCategory()) {
     case PRIMITIVE:
       return getConverter((PrimitiveObjectInspector) inputOI, (PrimitiveObjectInspector) outputOI);
@@ -161,38 +162,24 @@ public static Converter getConverter(ObjectInspector inputOI,
     }
   }
 
-  // Return the settable equivalent object inspector for primitive categories
-  // For eg: for table T containing partitions p1 and p2 (possibly different
-  // from the table T), return the settable inspector for T. The inspector for
-  // T is settable recursively i.e all the nested fields are also settable.
-  private static ObjectInspector getSettableConvertedOI(
-      ObjectInspector inputOI) {
-    switch (inputOI.getCategory()) {
-    case PRIMITIVE:
-      PrimitiveObjectInspector primInputOI = (PrimitiveObjectInspector) inputOI;
-      return PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(primInputOI);
-    case STRUCT:
-      return inputOI;
-    case LIST:
-      return inputOI;
-    case MAP:
-      return inputOI;
-    default:
-      throw new RuntimeException("Hive internal error: desired OI of "
-          + inputOI.getTypeName() + " not supported yet.");
-    }
-  }
-
   public static ObjectInspector getConvertedOI(
       ObjectInspector inputOI,
-      ObjectInspector outputOI) {
+      ObjectInspector outputOI,
+      boolean equalsCheck) {
     // If the inputOI is the same as the outputOI, just return it
-    if (inputOI.equals(outputOI)) {
+    if (equalsCheck && inputOI.equals(outputOI)) {
       return outputOI;
     }
+    // Return the settable equivalent object inspector for primitive categories
+    // For eg: for table T containing partitions p1 and p2 (possibly different
+    // from the table T), return the settable inspector for T. The inspector for
+    // T is settable recursively i.e all the nested fields are also settable.
+    // TODO: Add support for UNION once SettableUnionObjectInspector is implemented.
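+    // Note: the recursive calls below pass equalsCheck=false so that nested
+    // fields are rebuilt as settable OIs even when input and output are equal.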
     switch (outputOI.getCategory()) {
     case PRIMITIVE:
-      return outputOI;
+      PrimitiveObjectInspector primInputOI = (PrimitiveObjectInspector) inputOI;
+      return PrimitiveObjectInspectorFactory.
+          getPrimitiveWritableObjectInspector(primInputOI.getPrimitiveCategory());
     case STRUCT:
       StructObjectInspector structOutputOI = (StructObjectInspector) outputOI;
       if (structOutputOI.isSettable()) {
@@ -207,20 +194,22 @@ public static ObjectInspector getConvertedOI(
 
         for (StructField listField : listFields) {
           structFieldNames.add(listField.getFieldName());
-          structFieldObjectInspectors.add(
-              getSettableConvertedOI(listField.getFieldObjectInspector()));
+          structFieldObjectInspectors.add(getConvertedOI(listField.getFieldObjectInspector(),
+              listField.getFieldObjectInspector(), false));
         }
 
-        StandardStructObjectInspector structStandardOutputOI = ObjectInspectorFactory
-            .getStandardStructObjectInspector(
+        return ObjectInspectorFactory.getStandardStructObjectInspector(
             structFieldNames, structFieldObjectInspectors);
-        return structStandardOutputOI;
       }
     case LIST:
-      return outputOI;
+      ListObjectInspector listOutputOI = (ListObjectInspector) outputOI;
+      return ObjectInspectorFactory.getStandardListObjectInspector(
+          listOutputOI.getListElementObjectInspector());
     case MAP:
-      return outputOI;
+      MapObjectInspector mapOutputOI = (MapObjectInspector) outputOI;
+      return ObjectInspectorFactory.getStandardMapObjectInspector(
+          mapOutputOI.getMapKeyObjectInspector(), mapOutputOI.getMapValueObjectInspector());
     default:
       throw new RuntimeException("Hive internal error: conversion of "
           + inputOI.getTypeName() + " to " + outputOI.getTypeName()
-- 
1.7.12.4 (Apple Git-37)