diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeConstantDesc.java ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeConstantDesc.java
index 3bafb81..d25b2e8 100755
--- ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeConstantDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeConstantDesc.java
@@ -59,11 +59,11 @@ public Object getValue() {
 
   @Override
   public ConstantObjectInspector getWritableObjectInspector() {
-    PrimitiveCategory pc = ((PrimitiveTypeInfo)getTypeInfo())
-        .getPrimitiveCategory();
+    PrimitiveTypeInfo pti = (PrimitiveTypeInfo) getTypeInfo();
+    PrimitiveCategory pc = pti.getPrimitiveCategory();
     // Convert from Java to Writable
     Object writableValue = PrimitiveObjectInspectorFactory
-        .getPrimitiveJavaObjectInspector(pc).getPrimitiveWritableObject(
+        .getPrimitiveJavaObjectInspector(pti).getPrimitiveWritableObject(
           getValue());
     return PrimitiveObjectInspectorFactory
         .getPrimitiveWritableConstantObjectInspector((PrimitiveTypeInfo) getTypeInfo(), writableValue);
diff --git ql/src/test/queries/clientpositive/partition_varchar2.q ql/src/test/queries/clientpositive/partition_varchar2.q
new file mode 100644
index 0000000..92cb742
--- /dev/null
+++ ql/src/test/queries/clientpositive/partition_varchar2.q
@@ -0,0 +1,10 @@
+drop table partition_varchar_2;
+
+create table partition_varchar_2 (key string, value varchar(20)) partitioned by (dt varchar(15), region int);
+
+insert overwrite table partition_varchar_2 partition(dt='2000-01-01', region=1)
+  select * from src order by key limit 1;
+
+select * from partition_varchar_2 where cast(dt as varchar(10)) = '2000-01-01';
+
+drop table partition_varchar_2;
diff --git ql/src/test/results/clientpositive/partition_varchar2.q.out ql/src/test/results/clientpositive/partition_varchar2.q.out
new file mode 100644
index 0000000..0625cf0
--- /dev/null
+++ ql/src/test/results/clientpositive/partition_varchar2.q.out
@@ -0,0 +1,44 @@
+PREHOOK: query: drop table partition_varchar_2
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table partition_varchar_2
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table partition_varchar_2 (key string, value varchar(20)) partitioned by (dt varchar(15), region int)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table partition_varchar_2 (key string, value varchar(20)) partitioned by (dt varchar(15), region int)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@partition_varchar_2
+PREHOOK: query: insert overwrite table partition_varchar_2 partition(dt='2000-01-01', region=1)
+  select * from src order by key limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@partition_varchar_2@dt=2000-01-01/region=1
+POSTHOOK: query: insert overwrite table partition_varchar_2 partition(dt='2000-01-01', region=1)
+  select * from src order by key limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@partition_varchar_2@dt=2000-01-01/region=1
+POSTHOOK: Lineage: partition_varchar_2 PARTITION(dt=2000-01-01,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: partition_varchar_2 PARTITION(dt=2000-01-01,region=1).value EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: select * from partition_varchar_2 where cast(dt as varchar(10)) = '2000-01-01'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@partition_varchar_2
+PREHOOK: Input: default@partition_varchar_2@dt=2000-01-01/region=1
+#### A masked pattern was here ####
+POSTHOOK: query: select * from partition_varchar_2 where cast(dt as varchar(10)) = '2000-01-01'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@partition_varchar_2
+POSTHOOK: Input: default@partition_varchar_2@dt=2000-01-01/region=1
+#### A masked pattern was here ####
+POSTHOOK: Lineage: partition_varchar_2 PARTITION(dt=2000-01-01,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: partition_varchar_2 PARTITION(dt=2000-01-01,region=1).value EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+0	val_0	2000-01-01	1
+PREHOOK: query: drop table partition_varchar_2
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@partition_varchar_2
+PREHOOK: Output: default@partition_varchar_2
+POSTHOOK: query: drop table partition_varchar_2
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@partition_varchar_2
+POSTHOOK: Output: default@partition_varchar_2
+POSTHOOK: Lineage: partition_varchar_2 PARTITION(dt=2000-01-01,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: partition_varchar_2 PARTITION(dt=2000-01-01,region=1).value EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ]
diff --git serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorFactory.java serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorFactory.java
index fc0cee6..53b0615 100644
--- serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorFactory.java
+++ serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorFactory.java
@@ -22,7 +22,6 @@
 import java.util.HashMap;
 import java.util.Map;
 
-import org.apache.hadoop.hive.common.type.HiveVarchar;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DateWritable;
@@ -73,6 +72,8 @@
       new WritableDoubleObjectInspector();
   public static final WritableStringObjectInspector writableStringObjectInspector =
       new WritableStringObjectInspector();
+  public static final WritableHiveVarcharObjectInspector writableHiveVarcharObjectInspector =
+      new WritableHiveVarcharObjectInspector((VarcharTypeInfo) TypeInfoFactory.varcharTypeInfo);
   public static final WritableVoidObjectInspector writableVoidObjectInspector =
       new WritableVoidObjectInspector();
   public static final WritableDateObjectInspector writableDateObjectInspector =
@@ -104,6 +105,7 @@
         writableDoubleObjectInspector);
     cachedPrimitiveWritableInspectorCache.put(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.STRING_TYPE_NAME),
         writableStringObjectInspector);
+    cachedPrimitiveWritableInspectorCache.put(TypeInfoFactory.varcharTypeInfo, writableHiveVarcharObjectInspector);
     cachedPrimitiveWritableInspectorCache.put(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.VOID_TYPE_NAME),
         writableVoidObjectInspector);
     cachedPrimitiveWritableInspectorCache.put(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.DATE_TYPE_NAME),
@@ -127,6 +129,7 @@
     primitiveCategoryToWritableOI.put(PrimitiveCategory.FLOAT, writableFloatObjectInspector);
     primitiveCategoryToWritableOI.put(PrimitiveCategory.DOUBLE, writableDoubleObjectInspector);
     primitiveCategoryToWritableOI.put(PrimitiveCategory.STRING, writableStringObjectInspector);
+    primitiveCategoryToWritableOI.put(PrimitiveCategory.VARCHAR, writableHiveVarcharObjectInspector);
     primitiveCategoryToWritableOI.put(PrimitiveCategory.VOID, writableVoidObjectInspector);
     primitiveCategoryToWritableOI.put(PrimitiveCategory.DATE, writableDateObjectInspector);
     primitiveCategoryToWritableOI.put(PrimitiveCategory.TIMESTAMP, writableTimestampObjectInspector);
@@ -150,6 +153,8 @@
       new JavaDoubleObjectInspector();
   public static final JavaStringObjectInspector javaStringObjectInspector =
       new JavaStringObjectInspector();
+  public static final JavaHiveVarcharObjectInspector javaHiveVarcharObjectInspector =
+      new JavaHiveVarcharObjectInspector((VarcharTypeInfo) TypeInfoFactory.varcharTypeInfo);
   public static final JavaVoidObjectInspector javaVoidObjectInspector =
       new JavaVoidObjectInspector();
   public static final JavaDateObjectInspector javaDateObjectInspector =
@@ -181,6 +186,7 @@
         javaDoubleObjectInspector);
     cachedPrimitiveJavaInspectorCache.put(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.STRING_TYPE_NAME),
         javaStringObjectInspector);
+    cachedPrimitiveJavaInspectorCache.put(TypeInfoFactory.varcharTypeInfo, javaHiveVarcharObjectInspector);
     cachedPrimitiveJavaInspectorCache.put(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.VOID_TYPE_NAME),
         javaVoidObjectInspector);
     cachedPrimitiveJavaInspectorCache.put(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.DATE_TYPE_NAME),
@@ -204,6 +210,7 @@
     primitiveCategoryToJavaOI.put(PrimitiveCategory.FLOAT, javaFloatObjectInspector);
     primitiveCategoryToJavaOI.put(PrimitiveCategory.DOUBLE, javaDoubleObjectInspector);
     primitiveCategoryToJavaOI.put(PrimitiveCategory.STRING, javaStringObjectInspector);
+    primitiveCategoryToJavaOI.put(PrimitiveCategory.VARCHAR, javaHiveVarcharObjectInspector);
     primitiveCategoryToJavaOI.put(PrimitiveCategory.VOID, javaVoidObjectInspector);
     primitiveCategoryToJavaOI.put(PrimitiveCategory.DATE, javaDateObjectInspector);
     primitiveCategoryToJavaOI.put(PrimitiveCategory.TIMESTAMP, javaTimestampObjectInspector);
diff --git serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoFactory.java serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoFactory.java
index 13d1ec0..de8ea7e 100644
--- serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoFactory.java
+++ serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoFactory.java
@@ -24,6 +24,7 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
+import org.apache.hadoop.hive.common.type.HiveVarchar;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveTypeEntry;
@@ -47,6 +48,7 @@ private TypeInfoFactory() {
   public static final PrimitiveTypeInfo intTypeInfo = new PrimitiveTypeInfo(serdeConstants.INT_TYPE_NAME);
   public static final PrimitiveTypeInfo longTypeInfo = new PrimitiveTypeInfo(serdeConstants.BIGINT_TYPE_NAME);
   public static final PrimitiveTypeInfo stringTypeInfo = new PrimitiveTypeInfo(serdeConstants.STRING_TYPE_NAME);
+  public static final PrimitiveTypeInfo varcharTypeInfo = new VarcharTypeInfo(HiveVarchar.MAX_VARCHAR_LENGTH);
   public static final PrimitiveTypeInfo floatTypeInfo = new PrimitiveTypeInfo(serdeConstants.FLOAT_TYPE_NAME);
   public static final PrimitiveTypeInfo doubleTypeInfo = new PrimitiveTypeInfo(serdeConstants.DOUBLE_TYPE_NAME);
   public static final PrimitiveTypeInfo byteTypeInfo = new PrimitiveTypeInfo(serdeConstants.TINYINT_TYPE_NAME);
@@ -68,6 +70,7 @@ private TypeInfoFactory() {
     cachedPrimitiveTypeInfo.put(serdeConstants.INT_TYPE_NAME, intTypeInfo);
     cachedPrimitiveTypeInfo.put(serdeConstants.BIGINT_TYPE_NAME, longTypeInfo);
     cachedPrimitiveTypeInfo.put(serdeConstants.STRING_TYPE_NAME, stringTypeInfo);
+    cachedPrimitiveTypeInfo.put(varcharTypeInfo.getQualifiedName(), varcharTypeInfo);
     cachedPrimitiveTypeInfo.put(serdeConstants.FLOAT_TYPE_NAME, floatTypeInfo);
     cachedPrimitiveTypeInfo.put(serdeConstants.DOUBLE_TYPE_NAME, doubleTypeInfo);
     cachedPrimitiveTypeInfo.put(serdeConstants.TINYINT_TYPE_NAME, byteTypeInfo);
diff --git serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/primitive/TestPrimitiveObjectInspectorFactory.java serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/primitive/TestPrimitiveObjectInspectorFactory.java
new file mode 100644
index 0000000..95197d4
--- /dev/null
+++ serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/primitive/TestPrimitiveObjectInspectorFactory.java
@@ -0,0 +1,41 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.serde2.objectinspector.primitive;
+
+import junit.framework.TestCase;
+
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
+
+public class TestPrimitiveObjectInspectorFactory extends TestCase {
+
+  public void testGetPrimitiveWritableObjectInspector() {
+    // even without type params, return a default OI for varchar
+    PrimitiveObjectInspector poi = PrimitiveObjectInspectorFactory
+        .getPrimitiveWritableObjectInspector(PrimitiveCategory.VARCHAR);
+    assertEquals(poi, PrimitiveObjectInspectorFactory.writableHiveVarcharObjectInspector);
+  }
+
+  public void testGetPrimitiveJavaObjectInspector() {
+    // even without type params, return a default OI for varchar
+    PrimitiveObjectInspector poi = PrimitiveObjectInspectorFactory
+        .getPrimitiveJavaObjectInspector(PrimitiveCategory.VARCHAR);
+    assertEquals(poi, PrimitiveObjectInspectorFactory.javaHiveVarcharObjectInspector);
+  }
+}
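
Reviewer note, not part of the patch: a minimal sketch of how the new default varchar inspectors are expected to resolve once the cache entries above are in place. The class and method names below are hypothetical; the sketch only exercises constants and overloads this patch relies on (TypeInfoFactory.varcharTypeInfo, PrimitiveObjectInspectorFactory.javaHiveVarcharObjectInspector, and the PrimitiveTypeInfo-based getPrimitiveJavaObjectInspector used by ExprNodeConstantDesc), in the same style as the new TestPrimitiveObjectInspectorFactory.

// Illustrative sketch only; assumes the constants added by this patch.
package org.apache.hadoop.hive.serde2.objectinspector.primitive;

import junit.framework.TestCase;

import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class VarcharDefaultObjectInspectorSketch extends TestCase {

  public void testTypeInfoLookupFindsDefaultVarcharOI() {
    // With the cache entry keyed by TypeInfoFactory.varcharTypeInfo, the
    // TypeInfo-based overload that ExprNodeConstantDesc now calls should
    // return the default varchar OI rather than failing for VARCHAR.
    PrimitiveObjectInspector poi = PrimitiveObjectInspectorFactory
        .getPrimitiveJavaObjectInspector(TypeInfoFactory.varcharTypeInfo);
    assertEquals(PrimitiveObjectInspectorFactory.javaHiveVarcharObjectInspector, poi);
  }
}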