Index: ql/src/test/results/clientpositive/udf_struct.q.out
===================================================================
--- ql/src/test/results/clientpositive/udf_struct.q.out (revision 0)
+++ ql/src/test/results/clientpositive/udf_struct.q.out (revision 0)
@@ -0,0 +1,67 @@
+PREHOOK: query: DESCRIBE FUNCTION struct
+PREHOOK: type: DESCFUNCTION
+POSTHOOK: query: DESCRIBE FUNCTION struct
+POSTHOOK: type: DESCFUNCTION
+struct(col1, col2, col3, ...) - Creates a struct with the given field values
+PREHOOK: query: DESCRIBE FUNCTION EXTENDED struct
+PREHOOK: type: DESCFUNCTION
+POSTHOOK: query: DESCRIBE FUNCTION EXTENDED struct
+POSTHOOK: type: DESCFUNCTION
+struct(col1, col2, col3, ...) - Creates a struct with the given field values
+PREHOOK: query: EXPLAIN
+SELECT struct(1), struct(1, "a"), struct(1, "b", 1.5).col1, struct(1, struct("a", 1.5)).col2.col1
+FROM src LIMIT 1
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+SELECT struct(1), struct(1, "a"), struct(1, "b", 1.5).col1, struct(1, struct("a", 1.5)).col2.col1
+FROM src LIMIT 1
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION struct 1)) (TOK_SELEXPR (TOK_FUNCTION struct 1 "a")) (TOK_SELEXPR (. (TOK_FUNCTION struct 1 "b" 1.5) col1)) (TOK_SELEXPR (. (. (TOK_FUNCTION struct 1 (TOK_FUNCTION struct "a" 1.5)) col2) col1))) (TOK_LIMIT 1)))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        src
+          TableScan
+            alias: src
+            Select Operator
+              expressions:
+                    expr: struct(1)
+                    type: struct<col1:int>
+                    expr: struct(1,'a')
+                    type: struct<col1:int,col2:string>
+                    expr: struct(1,'b',1.5).col1
+                    type: int
+                    expr: struct(1,struct('a',1.5)).col2.col1
+                    type: string
+              outputColumnNames: _col0, _col1, _col2, _col3
+              Limit
+                File Output Operator
+                  compressed: false
+                  GlobalTableId: 0
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: 1
+
+
+PREHOOK: query: SELECT struct(1), struct(1, "a"), struct(1, "b", 1.5).col1, struct(1, struct("a", 1.5)).col2.col1
+FROM src LIMIT 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk/build/ql/scratchdir/hive_2010-02-03_13-12-58_773_1671454742242145130/10000
+POSTHOOK: query: SELECT struct(1), struct(1, "a"), struct(1, "b", 1.5).col1, struct(1, struct("a", 1.5)).col2.col1
+FROM src LIMIT 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk/build/ql/scratchdir/hive_2010-02-03_13-12-58_773_1671454742242145130/10000
+{"col1":1} {"col1":1,"col2":"a"} 1 a
Index: ql/src/test/queries/clientpositive/udf_struct.q
===================================================================
--- ql/src/test/queries/clientpositive/udf_struct.q (revision 0)
+++ ql/src/test/queries/clientpositive/udf_struct.q (revision 0)
@@ -0,0 +1,9 @@
+DESCRIBE FUNCTION struct;
+DESCRIBE FUNCTION EXTENDED struct;
+
+EXPLAIN
+SELECT struct(1), struct(1, "a"), struct(1, "b", 1.5).col1, struct(1, struct("a", 1.5)).col2.col1
+FROM src LIMIT 1;
+
+SELECT struct(1), struct(1, "a"), struct(1, "b", 1.5).col1, struct(1, struct("a", 1.5)).col2.col1
+FROM src LIMIT 1;
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java (revision 906189)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java (working copy)
@@ -152,6 +152,7 @@
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNull;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFSize;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFSplit;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFStruct;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFWhen;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDTF;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDTFExplode;
@@ -321,6 +322,7 @@
     // Generic UDFs
     registerGenericUDF("array", GenericUDFArray.class);
     registerGenericUDF("map", GenericUDFMap.class);
+    registerGenericUDF("struct", GenericUDFStruct.class);
     registerGenericUDF("case", GenericUDFCase.class);
     registerGenericUDF("when", GenericUDFWhen.class);
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g (revision 906189)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g (working copy)
@@ -1117,7 +1117,7 @@
 @init { msgs.push("function name"); }
 @after { msgs.pop(); }
     : // Keyword IF is also a function name
-    Identifier | KW_IF | KW_ARRAY | KW_MAP
+    Identifier | KW_IF | KW_ARRAY | KW_MAP | KW_STRUCT
     ;
 
 castExpression
@@ -1357,6 +1357,7 @@
     | KW_STRING
     | KW_ARRAY
     | KW_MAP
+    | KW_STRUCT
     | EQUAL
     | NOTEQUAL
     | LESSTHANOREQUALTO
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFStruct.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFStruct.java (revision 0)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFStruct.java (revision 0)
@@ -0,0 +1,73 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+
+@Description(name = "struct",
+    value = "_FUNC_(col1, col2, col3, ...) - Creates a struct with the given field values")
+public class GenericUDFStruct extends GenericUDF {
+  Object[] ret;
+
+  @Override
+  public ObjectInspector initialize(ObjectInspector[] arguments)
+      throws UDFArgumentException {
+
+    int numFields = arguments.length;
+    ret = new Object[numFields];
+
+    ArrayList<String> fname = new ArrayList<String>(numFields);
+    for (int f = 1; f <= numFields; f++) {
+      fname.add("col" + f);
+    }
+    StructObjectInspector soi =
+      ObjectInspectorFactory.getStandardStructObjectInspector(fname, Arrays.asList(arguments));
+    return soi;
+  }
+
+  @Override
+  public Object evaluate(DeferredObject[] arguments) throws HiveException {
+    for (int i = 0; i < arguments.length; i++) {
+      ret[i] = arguments[i].get();
+    }
+    return ret;
+  }
+
+  @Override
+  public String getDisplayString(String[] children) {
+    StringBuilder sb = new StringBuilder();
+    sb.append("struct(");
+    for (int i = 0; i < children.length; i++) {
+      if (i > 0) {
+        sb.append(',');
+      }
+      sb.append(children[i]);
+    }
+    sb.append(')');
+    return sb.toString();
+  }
+}
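
Background note on how the pieces of this patch fit together: GenericUDFStruct.initialize() builds a StandardStructObjectInspector whose field names are generated as col1..colN, and evaluate() returns a plain Object[] with one slot per field, which is the standard struct representation that object inspector reads. The following standalone sketch is not part of the patch; the class name StructOIDemo and the choice of Java primitive object inspectors are illustrative only, assuming the Hive serde2 object-inspector API shown in the imports above.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

// Hypothetical demo class, not part of the patch: pairs an Object[] struct value
// with a StandardStructObjectInspector the same way GenericUDFStruct does.
public class StructOIDemo {
  public static void main(String[] args) {
    // Field object inspectors for struct(1, "a"): an int field and a string field.
    List<ObjectInspector> fieldOIs = Arrays.asList(
        (ObjectInspector) PrimitiveObjectInspectorFactory.javaIntObjectInspector,
        (ObjectInspector) PrimitiveObjectInspectorFactory.javaStringObjectInspector);

    // Same col1..colN naming scheme used in GenericUDFStruct.initialize().
    List<String> fieldNames = new ArrayList<String>();
    for (int f = 1; f <= fieldOIs.size(); f++) {
      fieldNames.add("col" + f);
    }

    StructObjectInspector soi =
        ObjectInspectorFactory.getStandardStructObjectInspector(fieldNames, fieldOIs);

    // The standard struct value is an Object[] holding one value per field,
    // which is exactly what GenericUDFStruct.evaluate() returns.
    Object[] structValue = new Object[] { Integer.valueOf(1), "a" };
    for (StructField sf : soi.getAllStructFieldRefs()) {
      System.out.println(sf.getFieldName() + " = "
          + soi.getStructFieldData(structValue, sf));
    }
  }
}

Run on its own, the sketch should print "col1 = 1" and "col2 = a", mirroring the {"col1":1,"col2":"a"} output recorded in udf_struct.q.out.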