Index: ql/src/test/org/apache/hadoop/hive/ql/testutil/DataBuilder.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/testutil/DataBuilder.java	(revision 0)
+++ ql/src/test/org/apache/hadoop/hive/ql/testutil/DataBuilder.java	(working copy)
@@ -0,0 +1,52 @@
+package org.apache.hadoop.hive.ql.testutil;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import org.apache.hadoop.hive.serde2.objectinspector.InspectableObject;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
+
+/**
+ * Collects column names, column types and rows of data, and turns them into
+ * an array of {@link InspectableObject} so operator tests can feed concrete
+ * rows through an operator tree.
+ */
+class DataBuilder {
+
+  private final List<String> columnNames;
+  private final List<ObjectInspector> columnTypes;
+  private final List<List<Object>> rows;
+
+  public DataBuilder() {
+    columnNames = new ArrayList<String>();
+    columnTypes = new ArrayList<ObjectInspector>();
+    rows = new ArrayList<List<Object>>();
+  }
+
+  public void setColumnNames(String... names) {
+    for (String name : names) {
+      columnNames.add(name);
+    }
+  }
+
+  public void setColumnTypes(ObjectInspector... types) {
+    for (ObjectInspector type : types) {
+      columnTypes.add(type);
+    }
+  }
+
+  public void addRow(Object... columns) {
+    rows.add(Arrays.asList(columns));
+  }
+
+  /** Builds one InspectableObject per added row, all sharing the declared schema. */
+  public InspectableObject[] createRows() {
+    InspectableObject[] toReturn = new InspectableObject[rows.size()];
+    for (int i = 0; i < rows.size(); i++) {
+      toReturn[i] = new InspectableObject(rows.get(i),
+          ObjectInspectorFactory.getStandardStructObjectInspector(columnNames, columnTypes));
+    }
+    return toReturn;
+  }
+}
Index: ql/src/test/org/apache/hadoop/hive/ql/testutil/OperatorTestUtils.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/testutil/OperatorTestUtils.java	(revision 0)
+++ ql/src/test/org/apache/hadoop/hive/ql/testutil/OperatorTestUtils.java	(working copy)
@@ -0,0 +1,70 @@
+package org.apache.hadoop.hive.ql.testutil;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import junit.framework.Assert;
+
+import org.apache.hadoop.hive.ql.exec.CollectOperator;
+import org.apache.hadoop.hive.ql.exec.Operator;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.plan.SelectDesc;
+import org.apache.hadoop.hive.serde2.objectinspector.InspectableObject;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.objectinspector.StructField;
+import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+
+public class OperatorTestUtils {
+
+  /** Shorthand for an expression referencing a string column by name. */
+  public static ExprNodeColumnDesc getStringColumn(String columnName) {
+    return new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, columnName, "", false);
+  }
+
+  /** Generates the select operator's default output names: _col0, _col1, ... */
+  public static List<String> createOutputColumnNames(List<ExprNodeDesc> expressionList) {
+    List<String> outputCols = new ArrayList<String>();
+    for (int i = 0; i < expressionList.size(); i++) {
+      outputCols.add("_col" + i);
+    }
+    return outputCols;
+  }
+
+  /**
+   * Given a select operator and a collect operator, feed the sourceData into
+   * the operator tree and assert that each row matches the expected result.
+   */
+  public static void assertResults(Operator<SelectDesc> selectOp, CollectOperator collectOp,
+      InspectableObject[] sourceData, InspectableObject[] expected) throws HiveException {
+    InspectableObject resultRef = new InspectableObject();
+    for (int i = 0; i < sourceData.length; i++) {
+      selectOp.process(sourceData[i].o, 0);
+      collectOp.retrieve(resultRef);
+      StructObjectInspector expectedOi = (StructObjectInspector) expected[i].oi;
+      List<? extends StructField> expectedFields = expectedOi.getAllStructFieldRefs();
+      StructObjectInspector destinationOi = (StructObjectInspector) resultRef.oi;
+      List<? extends StructField> destinationFields = destinationOi.getAllStructFieldRefs();
+      Assert.assertEquals("Source and destination have differing numbers of fields ",
+          expectedFields.size(), destinationFields.size());
+      for (StructField field : expectedFields) {
+        // Resolve the field in the DESTINATION inspector so the asserts below
+        // really compare the operator's output against the expected value
+        // (looking it up in expectedOi would compare expected to itself).
+        StructField dest = destinationOi.getStructFieldRef(field.getFieldName());
+        Assert.assertNotNull("Could not find column named " + field.getFieldName(), dest);
+        Assert.assertEquals(field.getFieldObjectInspector(), dest.getFieldObjectInspector());
+        Assert.assertEquals("comparing "
+            + expectedOi.getStructFieldData(expected[i].o, field) + " "
+            + field.getFieldObjectInspector() + " to "
+            + destinationOi.getStructFieldData(resultRef.o, dest) + " "
+            + dest.getFieldObjectInspector(), 0,
+            ObjectInspectorUtils.compare(
+                expectedOi.getStructFieldData(expected[i].o, field), field.getFieldObjectInspector(),
+                destinationOi.getStructFieldData(resultRef.o, dest), dest.getFieldObjectInspector()));
+      }
+    }
+    selectOp.close(false);
+  }
+}
Index: ql/src/test/org/apache/hadoop/hive/ql/testutil/SimpleConcatTest.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/testutil/SimpleConcatTest.java	(revision 0)
+++ ql/src/test/org/apache/hadoop/hive/ql/testutil/SimpleConcatTest.java	(working copy)
@@ -0,0 +1,67 @@
+package org.apache.hadoop.hive.ql.testutil;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import junit.framework.TestCase;
+
+import org.apache.hadoop.hive.ql.exec.CollectOperator;
+import org.apache.hadoop.hive.ql.exec.Operator;
+import org.apache.hadoop.hive.ql.exec.OperatorFactory;
+import org.apache.hadoop.hive.ql.parse.TypeCheckProcFactory;
+import org.apache.hadoop.hive.ql.plan.CollectDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.plan.SelectDesc;
+import org.apache.hadoop.hive.serde2.objectinspector.InspectableObject;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.mapred.JobConf;
+
+public class SimpleConcatTest extends TestCase {
+
+  public static InspectableObject[] getBasicTable() {
+    DataBuilder db = new DataBuilder();
+    db.setColumnNames("a", "b", "c");
+    db.setColumnTypes(
+        PrimitiveObjectInspectorFactory.javaStringObjectInspector,
+        PrimitiveObjectInspectorFactory.javaStringObjectInspector,
+        PrimitiveObjectInspectorFactory.javaStringObjectInspector);
+    db.addRow("one", "two", "three");
+    db.addRow("four", "two", "three");
+    db.addRow(null, "two", "three");
+    return db.createRows();
+  }
+
+  public static InspectableObject[] getExpectedResult() {
+    DataBuilder db = new DataBuilder();
+    // Column names must match the select operator's generated output names
+    // (_col0, _col1) so assertResults can resolve them in the result rows.
+    db.setColumnNames("_col0", "_col1");
+    db.setColumnTypes(PrimitiveObjectInspectorFactory.javaStringObjectInspector,
+        PrimitiveObjectInspectorFactory.javaStringObjectInspector);
+    db.addRow("one", "onetwo");
+    db.addRow("four", "fourtwo");
+    db.addRow(null, null);
+    return db.createRows();
+  }
+
+  public void testConcatUdf() throws Throwable {
+    InspectableObject[] data = getBasicTable();
+    ExprNodeDesc expr1 = OperatorTestUtils.getStringColumn("a");
+    ExprNodeDesc expr2 = OperatorTestUtils.getStringColumn("b");
+    ExprNodeDesc exprDesc2 = TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("concat", expr1, expr2);
+    List<ExprNodeDesc> earr = new ArrayList<ExprNodeDesc>();
+    earr.add(expr1);
+    earr.add(exprDesc2);
+
+    SelectDesc selectCtx = new SelectDesc(earr, OperatorTestUtils.createOutputColumnNames(earr));
+    Operator<SelectDesc> op = OperatorFactory.get(SelectDesc.class);
+    op.setConf(selectCtx);
+
+    CollectDesc cd = new CollectDesc(Integer.valueOf(10));
+    CollectOperator cdop = (CollectOperator) OperatorFactory.getAndMakeChild(cd, op);
+
+    op.initialize(new JobConf(OperatorTestUtils.class), new ObjectInspector[] {data[0].oi});
+    OperatorTestUtils.assertResults(op, cdop, data, getExpectedResult());
+  }
+}