Index: ql/src/java/org/apache/hadoop/hive/ql/parse/FromClauseParser.g
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/FromClauseParser.g	(revision 1507762)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/FromClauseParser.g	(working copy)
@@ -125,11 +125,11 @@
 @init {gParent.msgs.push("lateral view"); }
 @after {gParent.msgs.pop(); }
 :
-  KW_LATERAL KW_VIEW KW_OUTER function tableAlias KW_AS identifier (COMMA identifier)*
-  -> ^(TOK_LATERAL_VIEW_OUTER ^(TOK_SELECT ^(TOK_SELEXPR function identifier+ tableAlias)))
+  KW_LATERAL KW_VIEW KW_OUTER function tableAlias (KW_AS identifier (COMMA identifier)*)?
+  -> ^(TOK_LATERAL_VIEW_OUTER ^(TOK_SELECT ^(TOK_SELEXPR function identifier* tableAlias)))
 |
-  KW_LATERAL KW_VIEW function tableAlias KW_AS identifier (COMMA identifier)*
-  -> ^(TOK_LATERAL_VIEW ^(TOK_SELECT ^(TOK_SELEXPR function identifier+ tableAlias)))
+  KW_LATERAL KW_VIEW function tableAlias (KW_AS identifier (COMMA identifier)*)?
+  -> ^(TOK_LATERAL_VIEW ^(TOK_SELECT ^(TOK_SELEXPR function identifier* tableAlias)))
 ;
 
 tableAlias
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java	(revision 1507762)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java	(working copy)
@@ -2496,14 +2496,12 @@
             (ASTNode) selExprList.getChild(1),
             ErrorMsg.UDTF_MULTIPLE_EXPR.getMsg()));
       }
-      // Require an AS for UDTFs for column aliases
+
       ASTNode selExpr = (ASTNode) selExprList.getChild(posn);
-      if (selExpr.getChildCount() < 2) {
-        throw new SemanticException(generateErrorMessage(udtfExpr,
-            ErrorMsg.UDTF_REQUIRE_AS.getMsg()));
-      }
+
       // Get the column / table aliases from the expression. Start from 1 as
       // 0 is the TOK_FUNCTION
+      // column names can also be inferred from the result of the UDTF
       for (int i = 1; i < selExpr.getChildCount(); i++) {
         ASTNode selExprChild = (ASTNode) selExpr.getChild(i);
         switch (selExprChild.getType()) {
@@ -5493,9 +5491,15 @@
     }
 
     StructObjectInspector outputOI = genericUDTF.initialize(colOIs);
+    int numUdtfCols = outputOI.getAllStructFieldRefs().size();
+    if (colAliases.isEmpty()) {
+      // user did not specify alias names, infer names from outputOI
+      for (StructField field : outputOI.getAllStructFieldRefs()) {
+        colAliases.add(field.getFieldName());
+      }
+    }
     // Make sure that the number of column aliases in the AS clause matches
     // the number of columns output by the UDTF
-    int numUdtfCols = outputOI.getAllStructFieldRefs().size();
     int numSuppliedAliases = colAliases.size();
     if (numUdtfCols != numSuppliedAliases) {
       throw new SemanticException(ErrorMsg.UDTF_ALIAS_MISMATCH
Index: ql/src/test/queries/clientpositive/lateral_view_noalias.q
===================================================================
--- ql/src/test/queries/clientpositive/lateral_view_noalias.q	(revision 0)
+++ ql/src/test/queries/clientpositive/lateral_view_noalias.q	(working copy)
@@ -0,0 +1,12 @@
+--HIVE-2608 Do not require AS a,b,c part in LATERAL VIEW
+EXPLAIN SELECT myTab.* from src LATERAL VIEW explode(map('key1', 100, 'key2', 200)) myTab limit 2;
+SELECT myTab.* from src LATERAL VIEW explode(map('key1', 100, 'key2', 200)) myTab limit 2;
+
+EXPLAIN SELECT explode(map('key1', 100, 'key2', 200)) from src limit 2;
+SELECT explode(map('key1', 100, 'key2', 200)) from src limit 2;
+
+-- view
+create view lv_noalias as SELECT myTab.* from src LATERAL VIEW explode(map('key1', 100, 'key2', 200)) myTab limit 2;
+
+explain select * from lv_noalias a join lv_noalias b on a.key=b.key;
+select * from lv_noalias a join lv_noalias b on a.key=b.key;
\ No newline at end of file
Index: ql/src/test/results/clientpositive/lateral_view_noalias.q.out
===================================================================
--- ql/src/test/results/clientpositive/lateral_view_noalias.q.out	(revision 0)
+++ ql/src/test/results/clientpositive/lateral_view_noalias.q.out	(working copy)
@@ -0,0 +1,345 @@
+PREHOOK: query: --HIVE-2608 Do not require AS a,b,c part in LATERAL VIEW
+EXPLAIN SELECT myTab.* from src LATERAL VIEW explode(map('key1', 100, 'key2', 200)) myTab limit 2
+PREHOOK: type: QUERY
+POSTHOOK: query: --HIVE-2608 Do not require AS a,b,c part in LATERAL VIEW
+EXPLAIN SELECT myTab.* from src LATERAL VIEW explode(map('key1', 100, 'key2', 200)) myTab limit 2
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_LATERAL_VIEW (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION explode (TOK_FUNCTION map 'key1' 100 'key2' 200)) (TOK_TABALIAS myTab))) (TOK_TABREF (TOK_TABNAME src)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF (TOK_TABNAME myTab)))) (TOK_LIMIT 2)))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        src
+          TableScan
+            alias: src
+            Lateral View Forward
+              Select Operator
+                Lateral View Join Operator
+                  outputColumnNames: _col4, _col5
+                  Select Operator
+                    expressions:
+                          expr: _col4
+                          type: string
+                          expr: _col5
+                          type: int
+                    outputColumnNames: _col0, _col1
+                    Limit
+                      File Output Operator
+                        compressed: false
+                        GlobalTableId: 0
+                        table:
+                            input format: org.apache.hadoop.mapred.TextInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              Select Operator
+                expressions:
+                      expr: map('key1':100,'key2':200)
+                      type: map<string,int>
+                outputColumnNames: _col0
+                UDTF Operator
+                  function name: explode
+                  Lateral View Join Operator
+                    outputColumnNames: _col4, _col5
+                    Select Operator
+                      expressions:
+                            expr: _col4
+                            type: string
+                            expr: _col5
+                            type: int
+                      outputColumnNames: _col0, _col1
+                      Limit
+                        File Output Operator
+                          compressed: false
+                          GlobalTableId: 0
+                          table:
+                              input format: org.apache.hadoop.mapred.TextInputFormat
+                              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: 2
+
+
+PREHOOK: query: SELECT myTab.* from src LATERAL VIEW explode(map('key1', 100, 'key2', 200)) myTab limit 2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT myTab.* from src LATERAL VIEW explode(map('key1', 100, 'key2', 200)) myTab limit 2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+key1	100
+key2	200
+PREHOOK: query: EXPLAIN SELECT explode(map('key1', 100, 'key2', 200)) from src limit 2
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT explode(map('key1', 100, 'key2', 200)) from src limit 2
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION explode (TOK_FUNCTION map 'key1' 100 'key2' 200)))) (TOK_LIMIT 2)))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        src
+          TableScan
+            alias: src
+            Select Operator
+              expressions:
+                    expr: map('key1':100,'key2':200)
+                    type: map<string,int>
+              outputColumnNames: _col0
+              UDTF Operator
+                function name: explode
+                Limit
+                  File Output Operator
+                    compressed: false
+                    GlobalTableId: 0
+                    table:
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: 2
+
+
+PREHOOK: query: SELECT explode(map('key1', 100, 'key2', 200)) from src limit 2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT explode(map('key1', 100, 'key2', 200)) from src limit 2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+key1	100
+key2	200
+PREHOOK: query: -- view
+create view lv_noalias as SELECT myTab.* from src LATERAL VIEW explode(map('key1', 100, 'key2', 200)) myTab limit 2
+PREHOOK: type: CREATEVIEW
+POSTHOOK: query: -- view
+create view lv_noalias as SELECT myTab.* from src LATERAL VIEW explode(map('key1', 100, 'key2', 200)) myTab limit 2
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Output: default@lv_noalias
+PREHOOK: query: explain select * from lv_noalias a join lv_noalias b on a.key=b.key
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select * from lv_noalias a join lv_noalias b on a.key=b.key
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF (TOK_TABNAME lv_noalias) a) (TOK_TABREF (TOK_TABNAME lv_noalias) b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-2 depends on stages: Stage-1, Stage-3
+  Stage-3 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        a:src
+          TableScan
+            alias: src
+            Lateral View Forward
+              Select Operator
+                Lateral View Join Operator
+                  outputColumnNames: _col4, _col5
+                  Select Operator
+                    expressions:
+                          expr: _col4
+                          type: string
+                          expr: _col5
+                          type: int
+                    outputColumnNames: _col0, _col1
+                    Limit
+                      Reduce Output Operator
+                        sort order:
+                        tag: -1
+                        value expressions:
+                              expr: _col0
+                              type: string
+                              expr: _col1
+                              type: int
+              Select Operator
+                expressions:
+                      expr: map('key1':100,'key2':200)
+                      type: map<string,int>
+                outputColumnNames: _col0
+                UDTF Operator
+                  function name: explode
+                  Lateral View Join Operator
+                    outputColumnNames: _col4, _col5
+                    Select Operator
+                      expressions:
+                            expr: _col4
+                            type: string
+                            expr: _col5
+                            type: int
+                      outputColumnNames: _col0, _col1
+                      Limit
+                        Reduce Output Operator
+                          sort order:
+                          tag: -1
+                          value expressions:
+                                expr: _col0
+                                type: string
+                                expr: _col1
+                                type: int
+      Reduce Operator Tree:
+        Extract
+          Limit
+            File Output Operator
+              compressed: false
+              GlobalTableId: 0
+              table:
+                  input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+
+  Stage: Stage-2
+    Map Reduce
+      Alias -> Map Operator Tree:
+        $INTNAME
+          Reduce Output Operator
+            key expressions:
+                  expr: _col0
+                  type: string
+            sort order: +
+            Map-reduce partition columns:
+                  expr: _col0
+                  type: string
+            tag: 0
+            value expressions:
+                  expr: _col0
+                  type: string
+                  expr: _col1
+                  type: int
+        $INTNAME1
+          Reduce Output Operator
+            key expressions:
+                  expr: _col0
+                  type: string
+            sort order: +
+            Map-reduce partition columns:
+                  expr: _col0
+                  type: string
+            tag: 1
+            value expressions:
+                  expr: _col0
+                  type: string
+                  expr: _col1
+                  type: int
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Inner Join 0 to 1
+          condition expressions:
+            0 {VALUE._col0} {VALUE._col1}
+            1 {VALUE._col0} {VALUE._col1}
+          handleSkewJoin: false
+          outputColumnNames: _col0, _col1, _col2, _col3
+          Select Operator
+            expressions:
+                  expr: _col0
+                  type: string
+                  expr: _col1
+                  type: int
+                  expr: _col2
+                  type: string
+                  expr: _col3
+                  type: int
+            outputColumnNames: _col0, _col1, _col2, _col3
+            File Output Operator
+              compressed: false
+              GlobalTableId: 0
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+  Stage: Stage-3
+    Map Reduce
+      Alias -> Map Operator Tree:
+        b:src
+          TableScan
+            alias: src
+            Lateral View Forward
+              Select Operator
+                Lateral View Join Operator
+                  outputColumnNames: _col4, _col5
+                  Select Operator
+                    expressions:
+                          expr: _col4
+                          type: string
+                          expr: _col5
+                          type: int
+                    outputColumnNames: _col0, _col1
+                    Limit
+                      Reduce Output Operator
+                        sort order:
+                        tag: -1
+                        value expressions:
+                              expr: _col0
+                              type: string
+                              expr: _col1
+                              type: int
+              Select Operator
+                expressions:
+                      expr: map('key1':100,'key2':200)
+                      type: map<string,int>
+                outputColumnNames: _col0
+                UDTF Operator
+                  function name: explode
+                  Lateral View Join Operator
+                    outputColumnNames: _col4, _col5
+                    Select Operator
+                      expressions:
+                            expr: _col4
+                            type: string
+                            expr: _col5
+                            type: int
+                      outputColumnNames: _col0, _col1
+                      Limit
+                        Reduce Output Operator
+                          sort order:
+                          tag: -1
+                          value expressions:
+                                expr: _col0
+                                type: string
+                                expr: _col1
+                                type: int
+      Reduce Operator Tree:
+        Extract
+          Limit
+            File Output Operator
+              compressed: false
+              GlobalTableId: 0
+              table:
+                  input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+
+PREHOOK: query: select * from lv_noalias a join lv_noalias b on a.key=b.key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@lv_noalias
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: select * from lv_noalias a join lv_noalias b on a.key=b.key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@lv_noalias
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+key1	100	key1	100
+key2	200	key2	200
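
A note for reviewers (not part of the patch): the SemanticAnalyzer hunk above makes the column names fall back to the field names reported by the UDTF's output StructObjectInspector whenever the query supplies no AS clause, which is why the un-aliased explode(map(...)) in the tests surfaces columns named key and value. The standalone sketch below reproduces just that inference step in isolation; the class name, the main() harness, and the hand-built ObjectInspector are illustrative stand-ins for the inspector that GenericUDTF.initialize() returns, not code taken from this patch.

// UdtfAliasInferenceSketch.java -- illustrative sketch only, not part of the patch.
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class UdtfAliasInferenceSketch {

  // Same shape as the new SemanticAnalyzer code: only when the user supplied
  // no aliases are the field names copied out of the UDTF's output inspector.
  static List<String> inferAliases(StructObjectInspector outputOI, List<String> colAliases) {
    if (colAliases.isEmpty()) {
      for (StructField field : outputOI.getAllStructFieldRefs()) {
        colAliases.add(field.getFieldName());
      }
    }
    return colAliases;
  }

  public static void main(String[] args) {
    // Hand-built stand-in for the StructObjectInspector that explode(map(...))
    // would report from GenericUDTF.initialize().
    List<String> fieldNames = Arrays.asList("key", "value");
    List<ObjectInspector> fieldOIs = Arrays.<ObjectInspector>asList(
        PrimitiveObjectInspectorFactory.javaStringObjectInspector,
        PrimitiveObjectInspectorFactory.javaIntObjectInspector);
    StructObjectInspector outputOI =
        ObjectInspectorFactory.getStandardStructObjectInspector(fieldNames, fieldOIs);

    // No AS clause: prints [key, value], matching the columns of lv_noalias.
    System.out.println(inferAliases(outputOI, new ArrayList<String>()));

    // AS clause present: the user-supplied aliases win, prints [k, v].
    System.out.println(inferAliases(outputOI, new ArrayList<String>(Arrays.asList("k", "v"))));
  }
}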