Index: data/files/pw17.txt
===================================================================
--- data/files/pw17.txt (revision 0)
+++ data/files/pw17.txt (revision 0)
@@ -0,0 +1,4 @@
+1 12 1
+2 23 2
+4 56 3
+6 8 4
Index: build-common.xml
===================================================================
--- build-common.xml (revision 1520717)
+++ build-common.xml (working copy)
@@ -331,6 +331,10 @@
+
+
+
+
Index: serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorConverters.java
===================================================================
--- serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorConverters.java (revision 1520717)
+++ serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorConverters.java (working copy)
@@ -25,16 +25,16 @@
import org.apache.hadoop.hive.serde2.objectinspector.primitive.JavaStringObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableHiveDecimalObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableBinaryObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableBooleanObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableByteObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableDateObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableDoubleObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableFloatObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableHiveDecimalObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableIntObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableLongObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableShortObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableDateObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableTimestampObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.VoidObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableStringObjectInspector;
@@ -136,6 +136,7 @@
if (inputOI.equals(outputOI)) {
return new IdentityConverter();
}
+ // TODO: Add support for UNION once SettableUnionObjectInspector is implemented.
switch (outputOI.getCategory()) {
case PRIMITIVE:
return getConverter((PrimitiveObjectInspector) inputOI, (PrimitiveObjectInspector) outputOI);
@@ -155,39 +156,24 @@
}
}
- // Return the settable equivalent object inspector for primitive categories
- // For eg: for table T containing partitions p1 and p2 (possibly different
- // from the table T), return the settable inspector for T. The inspector for
- // T is settable recursively i.e all the nested fields are also settable.
- private static ObjectInspector getSettableConvertedOI(
- ObjectInspector inputOI) {
- switch (inputOI.getCategory()) {
- case PRIMITIVE:
- PrimitiveObjectInspector primInputOI = (PrimitiveObjectInspector) inputOI;
- return PrimitiveObjectInspectorFactory.
- getPrimitiveWritableObjectInspector(primInputOI.getPrimitiveCategory());
- case STRUCT:
- return inputOI;
- case LIST:
- return inputOI;
- case MAP:
- return inputOI;
- default:
- throw new RuntimeException("Hive internal error: desired OI of "
- + inputOI.getTypeName() + " not supported yet.");
- }
- }
-
public static ObjectInspector getConvertedOI(
ObjectInspector inputOI,
- ObjectInspector outputOI) {
+ ObjectInspector outputOI,
+ boolean equalsCheck) {
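+ // equalsCheck == false skips the fast path below and always builds a
+ // settable equivalent OI; the recursive STRUCT case relies on this.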
// If the inputOI is the same as the outputOI, just return it
- if (inputOI.equals(outputOI)) {
+ if (equalsCheck && inputOI.equals(outputOI)) {
return outputOI;
}
+ // Return the settable equivalent object inspector for primitive categories.
+ // E.g., for a table T containing partitions p1 and p2 (whose OIs may differ
+ // from T's), return the settable inspector for T. The inspector for T is
+ // settable recursively, i.e. all the nested fields are also settable.
+ // TODO: Add support for UNION once SettableUnionObjectInspector is implemented.
switch (outputOI.getCategory()) {
case PRIMITIVE:
- return outputOI;
+ PrimitiveObjectInspector primInputOI = (PrimitiveObjectInspector) inputOI;
+ return PrimitiveObjectInspectorFactory.
+ getPrimitiveWritableObjectInspector(primInputOI.getPrimitiveCategory());
case STRUCT:
StructObjectInspector structOutputOI = (StructObjectInspector) outputOI;
if (structOutputOI.isSettable()) {
@@ -202,20 +188,22 @@
for (StructField listField : listFields) {
structFieldNames.add(listField.getFieldName());
- structFieldObjectInspectors.add(
- getSettableConvertedOI(listField.getFieldObjectInspector()));
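+ // Recurse with equalsCheck == false so each field OI is rebuilt as a
+ // settable OI even when the input and output field OIs are identical.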
+ structFieldObjectInspectors.add(getConvertedOI(listField.getFieldObjectInspector(),
+ listField.getFieldObjectInspector(), false));
}
- StandardStructObjectInspector structStandardOutputOI = ObjectInspectorFactory
- .getStandardStructObjectInspector(
+ return ObjectInspectorFactory.getStandardStructObjectInspector(
structFieldNames,
structFieldObjectInspectors);
- return structStandardOutputOI;
}
case LIST:
- return outputOI;
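+ // Wrap the output's element OI in a standard (settable) list OI.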
+ ListObjectInspector listOutputOI = (ListObjectInspector) outputOI;
+ return ObjectInspectorFactory.getStandardListObjectInspector(
+ listOutputOI.getListElementObjectInspector());
case MAP:
- return outputOI;
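+ // Wrap the output's key/value OIs in a standard (settable) map OI.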
+ MapObjectInspector mapOutputOI = (MapObjectInspector) outputOI;
+ return ObjectInspectorFactory.getStandardMapObjectInspector(
+ mapOutputOI.getMapKeyObjectInspector(), mapOutputOI.getMapValueObjectInspector());
default:
throw new RuntimeException("Hive internal error: conversion of "
+ inputOI.getTypeName() + " to " + outputOI.getTypeName()
Index: ql/src/test/results/clientpositive/partition_wise_fileformat17.q.out
===================================================================
--- ql/src/test/results/clientpositive/partition_wise_fileformat17.q.out (revision 0)
+++ ql/src/test/results/clientpositive/partition_wise_fileformat17.q.out (revision 0)
@@ -0,0 +1,85 @@
+PREHOOK: query: -- HIVE-5199 : CustomSerDe1 and CustomSerDe2 are used here.
+-- The final results should be all NULL columns deserialized using
+-- CustomSerDe1 and CustomSerDe2 irrespective of the inserted values
+
+DROP TABLE PW17
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: -- HIVE-5199 : CustomSerDe1 and CustomSerDe2 are used here.
+-- The final results should be all NULL columns deserialized using
+-- CustomSerDe1 and CustomSerDe2 irrespective of the inserted values
+
+DROP TABLE PW17
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE PW17(USER STRING, COMPLEXDT ARRAY<INT>) PARTITIONED BY (YEAR STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe1'
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE PW17(USER STRING, COMPLEXDT ARRAY<INT>) PARTITIONED BY (YEAR STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe1'
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@PW17
+PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/pw17.txt' INTO TABLE PW17 PARTITION (YEAR='1')
+PREHOOK: type: LOAD
+PREHOOK: Output: default@pw17
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/pw17.txt' INTO TABLE PW17 PARTITION (YEAR='1')
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@pw17
+POSTHOOK: Output: default@pw17@year=1
+PREHOOK: query: ALTER TABLE PW17 PARTITION(YEAR='1') SET SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe2'
+PREHOOK: type: ALTERPARTITION_SERIALIZER
+PREHOOK: Input: default@pw17
+PREHOOK: Output: default@pw17@year=1
+POSTHOOK: query: ALTER TABLE PW17 PARTITION(YEAR='1') SET SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe2'
+POSTHOOK: type: ALTERPARTITION_SERIALIZER
+POSTHOOK: Input: default@pw17
+POSTHOOK: Input: default@pw17@year=1
+POSTHOOK: Output: default@pw17@year=1
+PREHOOK: query: ALTER TABLE PW17 SET SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe1'
+PREHOOK: type: ALTERTABLE_SERIALIZER
+PREHOOK: Input: default@pw17
+PREHOOK: Output: default@pw17
+POSTHOOK: query: ALTER TABLE PW17 SET SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe1'
+POSTHOOK: type: ALTERTABLE_SERIALIZER
+POSTHOOK: Input: default@pw17
+POSTHOOK: Output: default@pw17
+PREHOOK: query: -- Without the fix, will throw cast exception via FetchOperator
+SELECT * FROM PW17
+PREHOOK: type: QUERY
+PREHOOK: Input: default@pw17
+PREHOOK: Input: default@pw17@year=1
+#### A masked pattern was here ####
+POSTHOOK: query: -- Without the fix, will throw cast exception via FetchOperator
+SELECT * FROM PW17
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@pw17
+POSTHOOK: Input: default@pw17@year=1
+#### A masked pattern was here ####
+NULL NULL 1
+NULL NULL 1
+NULL NULL 1
+NULL NULL 1
+PREHOOK: query: -- Test for non-partitioned table.
+DROP TABLE PW17_2
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: -- Test for non-partitioned table.
+DROP TABLE PW17_2
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE PW17_2(USER STRING, COMPLEXDT ARRAY<INT>) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe1'
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE PW17_2(USER STRING, COMPLEXDT ARRAY<INT>) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe1'
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@PW17_2
+PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/pw17.txt' INTO TABLE PW17_2
+PREHOOK: type: LOAD
+PREHOOK: Output: default@pw17_2
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/pw17.txt' INTO TABLE PW17_2
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@pw17_2
+PREHOOK: query: -- Without the fix, will throw cast exception via MapOperator
+SELECT COUNT(*) FROM PW17_2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@pw17_2
+#### A masked pattern was here ####
+POSTHOOK: query: -- Without the fix, will throw cast exception via MapOperator
+SELECT COUNT(*) FROM PW17_2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@pw17_2
+#### A masked pattern was here ####
+4
Index: ql/src/test/org/apache/hadoop/hive/serde2/CustomSerDe2.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/serde2/CustomSerDe2.java (revision 0)
+++ ql/src/test/org/apache/hadoop/hive/serde2/CustomSerDe2.java (revision 0)
@@ -0,0 +1,95 @@
+package org.apache.hadoop.hive.serde2;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Properties;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.serde.serdeConstants;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.Writable;
+
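+/**
+ * Test SerDe for HIVE-5199: deserialize() always returns the same
+ * pre-allocated row whose column values are all null, regardless of input.
+ */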
+public class CustomSerDe2 extends AbstractSerDe {
+
+ int numColumns;
+
+ StructObjectInspector rowOI;
+ ArrayList<Object> row;
+
+ @Override
+ public void initialize(Configuration conf, Properties tbl)
+ throws SerDeException {
+
+ // Read the configuration parameters
+ String columnNameProperty = tbl.getProperty(serdeConstants.LIST_COLUMNS);
+ String columnTypeProperty = tbl.getProperty(serdeConstants.LIST_COLUMN_TYPES);
+
+ // The input column can either be a string or a list of integer values.
+ List<String> columnNames = Arrays.asList(columnNameProperty.split(","));
+ List<TypeInfo> columnTypes = TypeInfoUtils
+ .getTypeInfosFromTypeString(columnTypeProperty);
+ assert columnNames.size() == columnTypes.size();
+ numColumns = columnNames.size();
+
+ // No exception for type checking for simplicity
+ // Constructing the row ObjectInspector:
+ // The row consists of some string columns, some Array columns.
+ List<ObjectInspector> columnOIs = new ArrayList<ObjectInspector>(
+ columnNames.size());
+ for (int c = 0; c < numColumns; c++) {
+ if (columnTypes.get(c).equals(TypeInfoFactory.stringTypeInfo)) {
+ columnOIs.add(PrimitiveObjectInspectorFactory.javaStringObjectInspector);
+ } else {
+ // Blindly add this as an integer list! Should be sufficient for the test case.
+ columnOIs.add(ObjectInspectorFactory.getStandardListObjectInspector(
+ PrimitiveObjectInspectorFactory.javaIntObjectInspector));
+ }
+ }
+ // StandardStruct uses ArrayList to store the row.
+ rowOI = ObjectInspectorFactory.getStandardStructObjectInspector(
+ columnNames, columnOIs);
+
+ // Constructing the row object, etc, which will be reused for all rows.
+ row = new ArrayList<Object>(numColumns);
+ for (int c = 0; c < numColumns; c++) {
+ row.add(null);
+ }
+ }
+
+ @Override
+ public ObjectInspector getObjectInspector() throws SerDeException {
+ return rowOI;
+ }
+
+ @Override
+ public Class<? extends Writable> getSerializedClass() {
+ return Text.class;
+ }
+
+ @Override
+ public Object deserialize(Writable blob) throws SerDeException {
+ // Now all the column values should always return NULL!
+ return row;
+ }
+
+ @Override
+ public Writable serialize(Object obj, ObjectInspector objInspector)
+ throws SerDeException {
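+ // Serialization is never exercised by the test case.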
+ return null;
+ }
+
+ @Override
+ public SerDeStats getSerDeStats() {
+ // no support for statistics
+ return null;
+ }
+
+}
Index: ql/src/test/org/apache/hadoop/hive/serde2/CustomNonSettableListObjectInspector1.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/serde2/CustomNonSettableListObjectInspector1.java (revision 0)
+++ ql/src/test/org/apache/hadoop/hive/serde2/CustomNonSettableListObjectInspector1.java (revision 0)
@@ -0,0 +1,64 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.serde2;
+
+import java.util.List;
+
+import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+
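+/**
+ * A deliberately non-settable ListObjectInspector for the HIVE-5199 tests:
+ * it reports type information, but the data-access methods are stubs.
+ */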
+public class CustomNonSettableListObjectInspector1 implements ListObjectInspector {
+
+ ObjectInspector listElementObjectInspector;
+
+ protected CustomNonSettableListObjectInspector1(
+ ObjectInspector listElementObjectInspector) {
+ this.listElementObjectInspector = listElementObjectInspector;
+ }
+
+ public final Category getCategory() {
+ return Category.LIST;
+ }
+
+ // without data
+ public ObjectInspector getListElementObjectInspector() {
+ return listElementObjectInspector;
+ }
+
+ // Not supported for the test case
+ public Object getListElement(Object data, int index) {
+ return null;
+ }
+
+ // Not supported for the test case
+ public int getListLength(Object data) {
+ return 0;
+ }
+
+ // Not supported for the test case
+ public List<?> getList(Object data) {
+ return null;
+ }
+
+ public String getTypeName() {
+ return org.apache.hadoop.hive.serde.serdeConstants.LIST_TYPE_NAME + "<"
+ + listElementObjectInspector.getTypeName() + ">";
+ }
+}
+
Index: ql/src/test/org/apache/hadoop/hive/serde2/CustomNonSettableStructObjectInspector1.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/serde2/CustomNonSettableStructObjectInspector1.java (revision 0)
+++ ql/src/test/org/apache/hadoop/hive/serde2/CustomNonSettableStructObjectInspector1.java (revision 0)
@@ -0,0 +1,112 @@
+package org.apache.hadoop.hive.serde2;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.objectinspector.StructField;
+import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+
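+/**
+ * A deliberately non-settable StructObjectInspector for the HIVE-5199 tests,
+ * used to exercise the convert-to-settable path in ObjectInspectorConverters.
+ */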
+public class CustomNonSettableStructObjectInspector1 extends
+StructObjectInspector {
+ public static final Log LOG = LogFactory
+ .getLog(CustomNonSettableStructObjectInspector1.class.getName());
+
+ protected static class MyField implements StructField {
+ protected int fieldID;
+ protected String fieldName;
+ protected ObjectInspector fieldObjectInspector;
+ protected String fieldComment;
+
+ public MyField(int fieldID, String fieldName,
+ ObjectInspector fieldObjectInspector) {
+ this.fieldID = fieldID;
+ this.fieldName = fieldName.toLowerCase();
+ this.fieldObjectInspector = fieldObjectInspector;
+ }
+
+ public MyField(int fieldID, String fieldName,
+ ObjectInspector fieldObjectInspector, String fieldComment) {
+ this(fieldID, fieldName, fieldObjectInspector);
+ this.fieldComment = fieldComment;
+ }
+
+ public int getFieldID() {
+ return fieldID;
+ }
+
+ public String getFieldName() {
+ return fieldName;
+ }
+
+ public ObjectInspector getFieldObjectInspector() {
+ return fieldObjectInspector;
+ }
+
+ public String getFieldComment() {
+ return fieldComment;
+ }
+
+ @Override
+ public String toString() {
+ return "" + fieldID + ":" + fieldName;
+ }
+ }
+
+ protected List<MyField> fields;
+
+ public String getTypeName() {
+ return ObjectInspectorUtils.getStandardStructTypeName(this);
+ }
+
+ /**
+ * Call ObjectInspectorFactory.getNonSettableStructObjectInspector instead.
+ */
+ protected CustomNonSettableStructObjectInspector1(List<String> structFieldNames,
+ List<ObjectInspector> structFieldObjectInspectors) {
+ init(structFieldNames, structFieldObjectInspectors);
+ }
+
+ protected void init(List<String> structFieldNames,
+ List<ObjectInspector> structFieldObjectInspectors) {
+ assert (structFieldNames.size() == structFieldObjectInspectors.size());
+
+ fields = new ArrayList<MyField>(structFieldNames.size());
+ for (int i = 0; i < structFieldNames.size(); i++) {
+ fields.add(new MyField(i, structFieldNames.get(i),
+ structFieldObjectInspectors.get(i), null));
+ }
+ }
+
+ public final Category getCategory() {
+ return Category.STRUCT;
+ }
+
+ // Without Data
+ @Override
+ public StructField getStructFieldRef(String fieldName) {
+ return ObjectInspectorUtils.getStandardStructFieldRef(fieldName, fields);
+ }
+
+ @Override
+ public List<? extends StructField> getAllStructFieldRefs() {
+ return fields;
+ }
+
+ // With Data - Unsupported for the test case
+ @Override
+ @SuppressWarnings("unchecked")
+ public Object getStructFieldData(Object data, StructField fieldRef) {
+ return null;
+ }
+
+ // Unsupported for the test case
+ @Override
+ @SuppressWarnings("unchecked")
+ public List<Object> getStructFieldsDataAsList(Object data) {
+ return null;
+ }
+}