Index: serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/StructTypeInfo.java
===================================================================
--- serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/StructTypeInfo.java	(revision 1197870)
+++ serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/StructTypeInfo.java	(working copy)
@@ -30,7 +30,7 @@
  * StructTypeInfo represents the TypeInfo of a struct. A struct contains one or
  * more fields each of which has a unique name and its own TypeInfo. Different
  * fields can have the same or different TypeInfo.
- * 
+ *
  * Always use the TypeInfoFactory to create new TypeInfo objects, instead of
  * directly creating an instance of this class.
  */
@@ -82,10 +82,8 @@
    * For TypeInfoFactory use only.
    */
   StructTypeInfo(List<String> names, List<TypeInfo> typeInfos) {
-    allStructFieldNames = new ArrayList<String>();
-    allStructFieldNames.addAll(names);
-    allStructFieldTypeInfos = new ArrayList<TypeInfo>();
-    allStructFieldTypeInfos.addAll(typeInfos);
+    allStructFieldNames = new ArrayList<String>(names);
+    allStructFieldTypeInfos = new ArrayList<TypeInfo>(typeInfos);
   }
 
   @Override
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/TableScanOperator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/TableScanOperator.java	(revision 1197870)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/TableScanOperator.java	(working copy)
@@ -36,9 +36,9 @@
 import org.apache.hadoop.hive.ql.stats.StatsSetupConst;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption;
 import org.apache.hadoop.hive.serde2.objectinspector.StructField;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.mapred.JobConf;
@@ -71,8 +71,8 @@
   /**
    * Other than gathering statistics for the ANALYZE command, the table scan operator
    * does not do anything special other than just forwarding the row. Since the table
-   * data is always read as part of the map-reduce framework by the mapper. But, this
-   * assumption is not true, i.e table data is not only read by the mapper, this
+   * data is always read as part of the map-reduce framework by the mapper. But, if this
+   * assumption stops being true, i.e. table data is no longer read only by the mapper, this
    * operator will be enhanced to read the table.
   **/
  @Override
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFIn.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFIn.java	(revision 1197870)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFIn.java	(working copy)
@@ -18,6 +18,8 @@
 package org.apache.hadoop.hive.ql.udf.generic;
 
+import java.util.HashSet;
+
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
@@ -25,6 +27,7 @@
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFUtils.ReturnObjectInspectorResolver;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.io.BooleanWritable;
@@ -50,9 +53,11 @@
 public class GenericUDFIn extends GenericUDF {
 
   private ObjectInspector[] argumentOIs;
+  private HashSet<Object> inSet;
+
   BooleanWritable bw = new BooleanWritable();
 
-  ReturnObjectInspectorResolver conversionHelper = null;
+  ReturnObjectInspectorResolver conversionHelper;
   ObjectInspector compareOI;
 
   @Override
@@ -92,6 +97,15 @@
     return PrimitiveObjectInspectorFactory.writableBooleanObjectInspector;
   }
 
+  private void prepareInSet(DeferredObject[] arguments) throws HiveException {
+    inSet = new HashSet<Object>();
+    for (int i = 1; i < arguments.length; ++i) {
+      inSet.add(((PrimitiveObjectInspector) compareOI).getPrimitiveJavaObject(conversionHelper
+          .convertIfNecessary(arguments[i].get(), argumentOIs[i])));
+    }
+  }
+
+
   @Override
   public Object evaluate(DeferredObject[] arguments) throws HiveException {
     bw.set(false);
@@ -100,21 +114,36 @@
       return null;
     }
 
-    for (int i=1; i
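
Note (not part of the patch): the GenericUDFIn change above replaces a per-row linear scan of the IN list with a set built once and probed in O(1) per row. The following standalone Java sketch illustrates that strategy under simplified assumptions (no ObjectInspectors or type conversion); the class and method names here (InSetSketch, prepareInSet, evaluate) are illustrative only.

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class InSetSketch {
  private final List<Object> candidates;
  private Set<Object> inSet; // built lazily on first use, analogous to inSet in the patch

  public InSetSketch(List<Object> candidates) {
    this.candidates = candidates;
  }

  // Analogue of prepareInSet(): load the constant IN-list into a hash set once.
  private void prepareInSet() {
    inSet = new HashSet<Object>(candidates);
  }

  // Analogue of evaluate(): a null probe yields null, otherwise an O(1) membership lookup.
  public Boolean evaluate(Object value) {
    if (value == null) {
      return null;
    }
    if (inSet == null) {
      prepareInSet();
    }
    return inSet.contains(value);
  }

  public static void main(String[] args) {
    InSetSketch in = new InSetSketch(Arrays.<Object>asList(1, 3, 5, 7));
    System.out.println(in.evaluate(3));    // true
    System.out.println(in.evaluate(4));    // false
    System.out.println(in.evaluate(null)); // null
  }
}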