diff --git a/itests/src/test/resources/testconfiguration.properties b/itests/src/test/resources/testconfiguration.properties
index 9c9f4cc..a77ad12 100644
--- a/itests/src/test/resources/testconfiguration.properties
+++ b/itests/src/test/resources/testconfiguration.properties
@@ -366,6 +366,7 @@ minitez.query.files=bucket_map_join_tez1.q,\
   tez_union_view.q,\
   tez_union_decimal.q,\
   tez_union_group_by.q,\
+  tez_union_with_udf.q,\
   tez_smb_main.q,\
   tez_smb_1.q,\
   vectorized_dynamic_partition_pruning.q,\
diff --git a/itests/test-serde/src/main/java/org/apache/hadoop/hive/udf/example/GenericUDFExampleAdd.java b/itests/test-serde/src/main/java/org/apache/hadoop/hive/udf/example/GenericUDFExampleAdd.java
new file mode 100644
index 0000000..85906c2
--- /dev/null
+++ b/itests/test-serde/src/main/java/org/apache/hadoop/hive/udf/example/GenericUDFExampleAdd.java
@@ -0,0 +1,54 @@
+package org.apache.hadoop.hive.udf.example;
+
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
+
+/**
+ * Example GenericUDF that adds its two arguments as doubles. Lives in the
+ * test-serde jar so qfile tests can exercise a session-added UDF jar against
+ * the Kryo plan de/serializer (see the Utilities change in this patch).
+ */
+public class GenericUDFExampleAdd extends GenericUDF {
+
+  Converter converter0;
+  Converter converter1;
+  DoubleWritable result = new DoubleWritable();
+
+  @Override
+  public ObjectInspector initialize(ObjectInspector[] arguments)
+      throws UDFArgumentException {
+    ObjectInspector doubleOI = PrimitiveObjectInspectorFactory
+        .getPrimitiveWritableObjectInspector(PrimitiveObjectInspector.PrimitiveCategory.DOUBLE);
+    converter0 = (Converter) ObjectInspectorConverters.getConverter(arguments[0], doubleOI);
+    converter1 = (Converter) ObjectInspectorConverters.getConverter(arguments[1], doubleOI);
+    return doubleOI;
+  }
+
+  @Override
+  public Object evaluate(DeferredObject[] arguments) throws HiveException {
+    DoubleWritable dw0 = (DoubleWritable) converter0.convert(arguments[0].get());
+    // Convert the SECOND operand here (was arguments[0] twice: copy/paste bug,
+    // masked in the qtest only because both actual arguments are `key`).
+    DoubleWritable dw1 = (DoubleWritable) converter1.convert(arguments[1].get());
+    if (dw0 == null || dw1 == null) {
+      return null;
+    }
+    result.set(dw0.get() + dw1.get());
+    return result;
+  }
+
+  @Override
+  public String getDisplayString(String[] children) {
+    return "GenericUDFExampleAdd";
+  }
+
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
index 5b21af9..0c536eb 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
@@ -1059,6 +1059,7 @@ public void exceptionThrown(Exception e) {
    */
   private static void serializeObjectByKryo(Kryo kryo, Object plan, OutputStream out) {
     Output output = new Output(out);
+    kryo.setClassLoader(getSessionSpecifiedClassLoader());
     kryo.writeObject(output, plan);
     output.close();
   }
@@ -1082,6 +1083,7 @@ private static void serializeObjectByKryo(Kryo kryo, Object plan, OutputStream o
   private static <T> T deserializeObjectByKryo(Kryo kryo, InputStream in, Class<T> clazz
       ) {
     Input inp = new Input(in);
+    kryo.setClassLoader(getSessionSpecifiedClassLoader());
     T t = kryo.readObject(inp,clazz);
     inp.close();
     return t;
diff --git a/ql/src/test/queries/clientpositive/tez_union_with_udf.q b/ql/src/test/queries/clientpositive/tez_union_with_udf.q
new file mode 100644
index 0000000..6826530
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/tez_union_with_udf.q
@@ -0,0 +1,13 @@
+select * from (select key + key from src limit 1) a
+union all
+select * from (select key + key from src limit 1) b;
+
+
+add jar ${system:maven.local.repository}/org/apache/hive/hive-it-test-serde/${system:hive.version}/hive-it-test-serde-${system:hive.version}.jar;
+
+create temporary function example_add as 'org.apache.hadoop.hive.udf.example.GenericUDFExampleAdd';
+
+-- Now try the query with the UDF
+select example_add(key, key)from (select key from src limit 1) a
+union all
+select example_add(key, key)from (select key from src limit 1) b;
diff --git a/ql/src/test/results/clientpositive/tez/tez_union_with_udf.q.out b/ql/src/test/results/clientpositive/tez/tez_union_with_udf.q.out
new file mode 100644
index 0000000..923e098
--- /dev/null
+++ b/ql/src/test/results/clientpositive/tez/tez_union_with_udf.q.out
@@ -0,0 +1,36 @@
+PREHOOK: query: select * from (select key + key from src limit 1) a
+union all
+select * from (select key + key from src limit 1) b
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: select * from (select key + key from src limit 1) a
+union all
+select * from (select key + key from src limit 1) b
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+476.0
+476.0
+PREHOOK: query: create temporary function example_add as 'org.apache.hadoop.hive.udf.example.GenericUDFExampleAdd'
+PREHOOK: type: CREATEFUNCTION
+PREHOOK: Output: example_add
+POSTHOOK: query: create temporary function example_add as 'org.apache.hadoop.hive.udf.example.GenericUDFExampleAdd'
+POSTHOOK: type: CREATEFUNCTION
+POSTHOOK: Output: example_add
+PREHOOK: query: -- Now try the query with the UDF
+select example_add(key, key)from (select key from src limit 1) a
+union all
+select example_add(key, key)from (select key from src limit 1) b
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: -- Now try the query with the UDF
+select example_add(key, key)from (select key from src limit 1) a
+union all
+select example_add(key, key)from (select key from src limit 1) b
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+476.0
+476.0