Index: ql/src/test/results/clientpositive/udf_inline.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_inline.q.out (revision 0) +++ ql/src/test/results/clientpositive/udf_inline.q.out (revision 0) @@ -0,0 +1,77 @@ +PREHOOK: query: describe function inline +PREHOOK: type: DESCFUNCTION +POSTHOOK: query: describe function inline +POSTHOOK: type: DESCFUNCTION +inline( ARRAY( STRUCT()[,STRUCT()] - explodes and array and struct into a table +PREHOOK: query: explain SELECT inline( + ARRAY( + STRUCT (1,'dude!'), + STRUCT (2,'Wheres'), + STRUCT (3,'my car?') + ) +) as (id, text) FROM SRC limit 2 +PREHOOK: type: QUERY +POSTHOOK: query: explain SELECT inline( + ARRAY( + STRUCT (1,'dude!'), + STRUCT (2,'Wheres'), + STRUCT (3,'my car?') + ) +) as (id, text) FROM SRC limit 2 +POSTHOOK: type: QUERY +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME SRC))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION inline (TOK_FUNCTION ARRAY (TOK_FUNCTION STRUCT 1 'dude!') (TOK_FUNCTION STRUCT 2 'Wheres') (TOK_FUNCTION STRUCT 3 'my car?'))) id text)) (TOK_LIMIT 2))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + src + TableScan + alias: src + Select Operator + expressions: + expr: array(struct(1,'dude!'),struct(2,'Wheres'),struct(3,'my car?')) + type: array> + outputColumnNames: _col0 + UDTF Operator + function name: inline + Limit + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + + Stage: Stage-0 + Fetch Operator + limit: 2 + + +PREHOOK: query: SELECT inline( + ARRAY( + STRUCT (1,'dude!'), + STRUCT (2,'Wheres'), + STRUCT (3,'my car?') + ) +) as (id, text) FROM SRC limit 2 +PREHOOK: type: QUERY +PREHOOK: 
Input: default@src +#### A masked pattern was here #### +POSTHOOK: query: SELECT inline( + ARRAY( + STRUCT (1,'dude!'), + STRUCT (2,'Wheres'), + STRUCT (3,'my car?') + ) +) as (id, text) FROM SRC limit 2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +#### A masked pattern was here #### +1 dude! +2 Wheres Index: ql/src/test/results/clientpositive/show_functions.q.out =================================================================== --- ql/src/test/results/clientpositive/show_functions.q.out (revision 1367096) +++ ql/src/test/results/clientpositive/show_functions.q.out (working copy) @@ -76,6 +76,7 @@ in in_file index +inline instr isnotnull isnull @@ -202,6 +203,7 @@ explode from_unixtime in_file +inline json_tuple lcase like Index: ql/src/test/queries/clientpositive/udf_inline.q =================================================================== --- ql/src/test/queries/clientpositive/udf_inline.q (revision 0) +++ ql/src/test/queries/clientpositive/udf_inline.q (revision 0) @@ -0,0 +1,18 @@ +describe function inline; + +explain SELECT inline( + ARRAY( + STRUCT (1,'dude!'), + STRUCT (2,'Wheres'), + STRUCT (3,'my car?') + ) +) as (id, text) FROM SRC limit 2; + +SELECT inline( + ARRAY( + STRUCT (1,'dude!'), + STRUCT (2,'Wheres'), + STRUCT (3,'my car?') + ) +) as (id, text) FROM SRC limit 2; + Index: ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java (revision 1367096) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java (working copy) @@ -204,6 +204,7 @@ import org.apache.hadoop.hive.ql.udf.generic.GenericUDFWhen; import org.apache.hadoop.hive.ql.udf.generic.GenericUDTF; import org.apache.hadoop.hive.ql.udf.generic.GenericUDTFExplode; +import org.apache.hadoop.hive.ql.udf.generic.GenericUDTFInline; import org.apache.hadoop.hive.ql.udf.generic.GenericUDTFJSONTuple; import 
org.apache.hadoop.hive.ql.udf.generic.GenericUDTFParseUrlTuple; import org.apache.hadoop.hive.ql.udf.generic.GenericUDTFStack; @@ -467,6 +468,7 @@ // Generic UDTF's registerGenericUDTF("explode", GenericUDTFExplode.class); + registerGenericUDTF("inline", GenericUDTFInline.class); registerGenericUDTF("json_tuple", GenericUDTFJSONTuple.class); registerGenericUDTF("parse_url_tuple", GenericUDTFParseUrlTuple.class); registerGenericUDTF("stack", GenericUDTFStack.class); Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFInline.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFInline.java (revision 0) +++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFInline.java (revision 0) @@ -0,0 +1,76 @@ +package org.apache.hadoop.hive.ql.udf.generic; + +import java.util.List; +import org.apache.hadoop.hive.ql.exec.Description; +import org.apache.hadoop.hive.ql.exec.UDFArgumentException; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.udf.generic.GenericUDFUtils; +import org.apache.hadoop.hive.ql.udf.generic.GenericUDTF; +import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category; +import org.apache.hadoop.hive.serde2.objectinspector.StructField; +import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; + +@Description(name ="inline", value= "_FUNC_( ARRAY( STRUCT()[,STRUCT()] " ++ "- explodes and array and struct into a table") +public class GenericUDTFInline extends GenericUDTF { + + private Object[] forwardObj; + private ListObjectInspector li; + private StructObjectInspector daStruct; + + public GenericUDTFInline(){ + + } + + @Override + public StructObjectInspector initialize(ObjectInspector[] ois) throws UDFArgumentException { + //There 
should be one argument that is a array of struct + if (ois.length!=1){ + throw new UDFArgumentException("UDF tables only one argument"); + } + if (ois[0].getCategory()!= Category.LIST){ + throw new UDFArgumentException("Top level object must be an array but " + + "was "+ois[0].getTypeName()); + } + li = (ListObjectInspector) ois[0]; + ObjectInspector sub=li.getListElementObjectInspector(); + if (sub.getCategory() != Category.STRUCT){ + throw new UDFArgumentException("The sub element must be struct, but was "+sub.getTypeName()); + } + daStruct = (StructObjectInspector) sub; + forwardObj = new Object[daStruct.getAllStructFieldRefs().size()]; + return daStruct; + } + + @Override + public void process(Object[] os) throws HiveException { + //list is always one item + List l = li.getList(os); + List fields = this.daStruct.getAllStructFieldRefs(); + for (Object linner: l ){ + List innerList = (List) linner; + for (List rowList : innerList){ + int i=0; + for (StructField f: fields){ + GenericUDFUtils.ReturnObjectInspectorResolver res + = new GenericUDFUtils.ReturnObjectInspectorResolver(); + res.update(f.getFieldObjectInspector()); + this.forwardObj[i]=res.convertIfNecessary(rowList.get(i), f.getFieldObjectInspector()); + i++; + } + forward(this.forwardObj); + } + } + } + + @Override + public void close() throws HiveException { + } + + @Override + public String toString() { + return "inline"; + } +}