diff --git parser/pom.xml parser/pom.xml
index 0edae27fa4..41fee3b028 100644
--- parser/pom.xml
+++ parser/pom.xml
@@ -56,6 +56,12 @@
       <version>3.2.1</version>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <version>${junit.version}</version>
+      <scope>test</scope>
+    </dependency>
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java parser/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java
similarity index 88%
rename from ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java
rename to parser/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java
index 46f1ec040a..121dbaf379 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java
+++ parser/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java
@@ -32,8 +32,6 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import org.apache.hadoop.hive.ql.Context;
-
 /**
  * ParseDriver.
  *
@@ -93,14 +91,9 @@ public Object errorNode(TokenStream input, Token start, Token stop, RecognitionE
     }
   };
 
-  public ASTNode parse(String command) throws ParseException {
+  public ParseResult parse(String command) throws ParseException {
     return parse(command, null);
   }
-
-  public ASTNode parse(String command, Context ctx)
-      throws ParseException {
-    return parse(command, ctx, null);
-  }
 
   /**
    * Parses a command, optionally assigning the parser's token stream to the
@@ -109,31 +102,19 @@ public ASTNode parse(String command, Context ctx)
    * @param command
    *          command to parse
    *
-   * @param ctx
-   *          context with which to associate this parser's token stream, or
-   *          null if either no context is available or the context already has
-   *          an existing stream
+   * @param configuration
+   *          hive configuration
    *
    * @return parsed AST
    */
-  public ASTNode parse(String command, Context ctx, String viewFullyQualifiedName)
+  public ParseResult parse(String command, Configuration configuration)
      throws ParseException {
     if (LOG.isDebugEnabled()) {
       LOG.debug("Parsing command: " + command);
     }
 
-    Configuration configuration = ctx == null ? null : ctx.getConf();
     GenericHiveLexer lexer = GenericHiveLexer.of(command, configuration);
     TokenRewriteStream tokens = new TokenRewriteStream(lexer);
-    if (ctx != null) {
-      if (viewFullyQualifiedName == null) {
-        // Top level query
-        ctx.setTokenRewriteStream(tokens);
-      } else {
-        // It is a view
-        ctx.addViewTokenRewriteStream(viewFullyQualifiedName, tokens);
-      }
-    }
     HiveParser parser = new HiveParser(tokens);
     parser.setTreeAdaptor(adaptor);
     parser.setHiveConf(configuration);
@@ -154,7 +135,7 @@ public ASTNode parse(String command, Context ctx, String viewFullyQualifiedName)
 
     ASTNode tree = (ASTNode) r.getTree();
     tree.setUnknownTokenBoundaries();
-    return tree;
+    return new ParseResult(tree, tokens);
   }
 
   /*
@@ -195,15 +176,14 @@ public ASTNode parseHint(String command) throws ParseException {
    * the input schema and hence the Result Expression cannot be analyzed by the regular Hive
    * translation process.
   */
-  public ASTNode parseSelect(String command, Context ctx) throws ParseException {
+  public ParseResult parseSelect(String command, Configuration configuration) throws ParseException {
     LOG.debug("Parsing command: {}", command);
-    Configuration configuration = ctx == null ? null : ctx.getConf();
     GenericHiveLexer lexer = GenericHiveLexer.of(command, configuration);
     TokenRewriteStream tokens = new TokenRewriteStream(lexer);
-    if (ctx != null) {
-      ctx.setTokenRewriteStream(tokens);
-    }
+//    if (ctx != null) {
+//      ctx.setTokenRewriteStream(tokens);
+//    }
     HiveParser parser = new HiveParser(tokens);
     parser.setTreeAdaptor(adaptor);
     parser.setHiveConf(configuration);
@@ -222,7 +202,7 @@ public ASTNode parseSelect(String command, Context ctx) throws ParseException {
       throw new ParseException(parser.errors);
     }
 
-    return (ASTNode) r.getTree();
+    return new ParseResult((ASTNode) r.getTree(), tokens);
   }
 
   public ASTNode parseExpression(String command) throws ParseException {
     LOG.debug("Parsing expression: {}", command);
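Note on the refactored entry points above: ParseDriver no longer reaches into the ql Context, so parse() and parseSelect() now return a ParseResult carrying both the AST and the TokenRewriteStream, and the caller decides what to do with the stream. A minimal usage sketch follows; the class name, query string, and plain Hadoop Configuration are illustrative only and not part of this patch.

    import org.antlr.runtime.TokenRewriteStream;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.ql.parse.ASTNode;
    import org.apache.hadoop.hive.ql.parse.ParseDriver;
    import org.apache.hadoop.hive.ql.parse.ParseException;
    import org.apache.hadoop.hive.ql.parse.ParseResult;

    public class ParseResultUsageSketch {
      public static void main(String[] args) throws ParseException {
        Configuration conf = new Configuration();   // a plain Hadoop conf is enough; null is also accepted
        ParseDriver pd = new ParseDriver();

        ParseResult result = pd.parse("SELECT key FROM src", conf);

        ASTNode ast = result.getTree();             // the AST, exactly what parse() used to return
        TokenRewriteStream tokens = result.getTokenRewriteStream();

        // The stream is what the removed Context wiring used to capture; callers can
        // still recover the original text of any subtree from it.
        String text = tokens.toString(ast.getTokenStartIndex(), ast.getTokenStopIndex());
        System.out.println(ast.dump());
        System.out.println(text);
      }
    }

Keeping the stream on the result object instead of on a Context is what lets ParseDriver move into the parser module without dragging the ql module along.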
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/ParseException.java parser/src/java/org/apache/hadoop/hive/ql/parse/ParseException.java
similarity index 100%
rename from ql/src/java/org/apache/hadoop/hive/ql/parse/ParseException.java
rename to parser/src/java/org/apache/hadoop/hive/ql/parse/ParseException.java
diff --git parser/src/java/org/apache/hadoop/hive/ql/parse/ParseResult.java parser/src/java/org/apache/hadoop/hive/ql/parse/ParseResult.java
new file mode 100644
index 0000000000..89629f371d
--- /dev/null
+++ parser/src/java/org/apache/hadoop/hive/ql/parse/ParseResult.java
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.parse;
+
+import org.antlr.runtime.TokenRewriteStream;
+
+public class ParseResult {
+  private final ASTNode tree;
+  private final TokenRewriteStream tokenRewriteStream;
+
+  public ParseResult(ASTNode tree, TokenRewriteStream tokenRewriteStream) {
+    this.tree = tree;
+    this.tokenRewriteStream = tokenRewriteStream;
+  }
+
+  public ASTNode getTree() {
+    return tree;
+  }
+
+  public TokenRewriteStream getTokenRewriteStream() {
+    return tokenRewriteStream;
+  }
+}
diff --git ql/src/test/org/apache/hadoop/hive/ql/parse/TestIUD.java parser/src/test/org/apache/hadoop/hive/ql/parse/TestIUD.java
similarity index 93%
rename from ql/src/test/org/apache/hadoop/hive/ql/parse/TestIUD.java
rename to parser/src/test/org/apache/hadoop/hive/ql/parse/TestIUD.java
index 4ec111cf1e..35fc2a4160 100644
--- ql/src/test/org/apache/hadoop/hive/ql/parse/TestIUD.java
+++ parser/src/test/org/apache/hadoop/hive/ql/parse/TestIUD.java
@@ -19,9 +19,7 @@
 
 import java.io.IOException;
 
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.Context;
-import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.conf.Configuration;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.BeforeClass;
@@ -31,13 +29,12 @@
  * various Parser tests for INSERT/UPDATE/DELETE
  */
 public class TestIUD {
-  private static HiveConf conf;
+  private static Configuration conf;
   private ParseDriver pd;
 
   @BeforeClass
   public static void initialize() {
-    conf = new HiveConf(SemanticAnalyzer.class);
-    SessionState.start(conf);
+    conf = new Configuration();
   }
 
   @Before
@@ -48,13 +45,9 @@ public void setup() throws SemanticException, IOException {
   ASTNode parse(String query) throws ParseException {
     return parse(query, pd, conf);
   }
-  static ASTNode parse(String query, ParseDriver pd, HiveConf conf) throws ParseException {
+  static ASTNode parse(String query, ParseDriver pd, Configuration conf) throws ParseException {
     ASTNode nd = null;
-    try {
-      nd = pd.parse(query, new Context(conf));
-    } catch (IOException e) {
-      e.printStackTrace();
-    }
+    nd = pd.parse(query, conf).getTree();
     return (ASTNode) nd.getChild(0);
   }
 
diff --git ql/src/test/org/apache/hadoop/hive/ql/parse/TestMergeStatement.java parser/src/test/org/apache/hadoop/hive/ql/parse/TestMergeStatement.java
similarity index 97%
rename from ql/src/test/org/apache/hadoop/hive/ql/parse/TestMergeStatement.java
rename to parser/src/test/org/apache/hadoop/hive/ql/parse/TestMergeStatement.java
index 396d344dd6..82deca7377 100644
--- ql/src/test/org/apache/hadoop/hive/ql/parse/TestMergeStatement.java
+++ parser/src/test/org/apache/hadoop/hive/ql/parse/TestMergeStatement.java
@@ -18,12 +18,10 @@
 package org.apache.hadoop.hive.ql.parse;
 
 import org.antlr.runtime.tree.RewriteEmptyStreamException;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.conf.Configuration;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.BeforeClass;
-import org.junit.Ignore;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.ExpectedException;
@@ -34,13 +32,12 @@
  * Testing parsing for SQL Merge statement
  */
 public class TestMergeStatement {
-  private static HiveConf conf;
+  private static Configuration conf;
   private ParseDriver pd;
 
   @BeforeClass
   public static void initialize() {
-    conf = new HiveConf(SemanticAnalyzer.class);
-    SessionState.start(conf);
+    conf = new Configuration();
   }
 
   @Before
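The migrated parser tests above (TestIUD, TestMergeStatement) all reduce to the same pattern: a plain Hadoop Configuration replaces HiveConf plus SessionState.start(), and the AST is unwrapped from the ParseResult. Condensed into a self-contained sketch; the test class name and query are invented for illustration and are not part of this patch.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.ql.parse.ASTNode;
    import org.apache.hadoop.hive.ql.parse.HiveParser;
    import org.apache.hadoop.hive.ql.parse.ParseDriver;
    import org.apache.hadoop.hive.ql.parse.ParseException;
    import org.junit.Assert;
    import org.junit.Before;
    import org.junit.Test;

    public class TestParserModuleSketch {
      private static final Configuration conf = new Configuration(); // no SessionState needed any more
      private ParseDriver pd;

      @Before
      public void setup() {
        pd = new ParseDriver();
      }

      private ASTNode parse(String query) throws ParseException {
        // getTree() replaces the old pd.parse(query, new Context(conf)) call
        return (ASTNode) pd.parse(query, conf).getTree().getChild(0);
      }

      @Test
      public void testSimpleSelect() throws ParseException {
        ASTNode ast = parse("SELECT key FROM src");
        Assert.assertEquals(HiveParser.TOK_QUERY, ast.getType());
      }
    }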
diff --git ql/src/test/org/apache/hadoop/hive/ql/parse/TestParseDriver.java parser/src/test/org/apache/hadoop/hive/ql/parse/TestParseDriver.java
similarity index 98%
rename from ql/src/test/org/apache/hadoop/hive/ql/parse/TestParseDriver.java
rename to parser/src/test/org/apache/hadoop/hive/ql/parse/TestParseDriver.java
index d6250f1c03..1aa1a40da3 100644
--- ql/src/test/org/apache/hadoop/hive/ql/parse/TestParseDriver.java
+++ parser/src/test/org/apache/hadoop/hive/ql/parse/TestParseDriver.java
@@ -44,7 +44,7 @@ public void testParse() throws Exception {
     String whereStr = "field5=1 and field6 in ('a', 'b')";
     String havingStr = "sum(field7) > 11";
     ASTNode tree = parseDriver.parse(selectStr + " from table1 where " + whereStr
-        + " group by field1, field2 having " + havingStr);
+        + " group by field1, field2 having " + havingStr).getTree();
     assertEquals(tree.getType(), 0);
     assertEquals(tree.getChildCount(), 2);
     ASTNode queryTree = (ASTNode) tree.getChild(0);
@@ -62,7 +62,7 @@
     assertEquals(fromAST.getChild(0).getChild(0).getChild(0).getText(), "table1");
     assertEquals(insertAST.getChildCount(), 5);
     assertEquals(insertAST.getChild(0).getType(), HiveParser.TOK_DESTINATION);
-    assertTree((ASTNode) insertAST.getChild(1), parseDriver.parseSelect(selectStr, null));
+    assertTree((ASTNode) insertAST.getChild(1), parseDriver.parseSelect(selectStr, null).getTree());
     assertEquals(insertAST.getChild(2).getType(), HiveParser.TOK_WHERE);
     assertTree((ASTNode) insertAST.getChild(2).getChild(0), parseDriver.parseExpression(whereStr));
     assertEquals(insertAST.getChild(3).getType(), HiveParser.TOK_GROUPBY);
@@ -79,7 +79,7 @@
 
   @Test
   public void testParseSelect() throws Exception {
-    ASTNode tree = parseDriver.parseSelect("select field1, field2, sum(field3+field4)", null);
+    ASTNode tree = parseDriver.parseSelect("select field1, field2, sum(field3+field4)", null).getTree();
     assertEquals(tree.getType(), HiveParser.TOK_SELECT);
     assertEquals(tree.getChildCount(), 3);
     for (int i = 0; i < 3; i++) {
@@ -288,7 +288,7 @@ public void testJoinResulInBraces() throws Exception {
         + "( (select key from src)a join (select value from src)b on a.key=b.value)";
 
     System.out.println(q);
-    ASTNode root = parseDriver.parse(q);
+    ASTNode root = parseDriver.parse(q).getTree();
     System.out.println(root.dump());
   }
 
@@ -299,7 +299,7 @@ public void testFromSubqueryIsSetop() throws Exception {
         "explain select key from ((select key from src) union (select key from src))subq ";
 
     System.out.println(q);
-    ASTNode root = parseDriver.parse(q);
+    ASTNode root = parseDriver.parse(q).getTree();
     System.out.println(root.dump());
   }
 
@@ -325,4 +325,4 @@ public void testParseDropScheduledQuery() throws Exception {
     parseDriver.parse("drop scheduled query asd");
   }
 
-}
\ No newline at end of file
+}
diff --git ql/src/test/org/apache/hadoop/hive/ql/parse/TestParseDriverIntervals.java parser/src/test/org/apache/hadoop/hive/ql/parse/TestParseDriverIntervals.java
similarity index 98%
rename from ql/src/test/org/apache/hadoop/hive/ql/parse/TestParseDriverIntervals.java
rename to parser/src/test/org/apache/hadoop/hive/ql/parse/TestParseDriverIntervals.java
index fdc4cfa8dd..3abaf24afd 100644
--- ql/src/test/org/apache/hadoop/hive/ql/parse/TestParseDriverIntervals.java
+++ parser/src/test/org/apache/hadoop/hive/ql/parse/TestParseDriverIntervals.java
@@ -59,7 +59,7 @@ public TestParseDriverIntervals(String query) {
 
   @Test
   public void parseInterval() throws Exception {
-    ASTNode root = parseDriver.parse(query);
+    ASTNode root = parseDriver.parse(query).getTree();
     assertNotNull("failed: " + query, findFunctionNode(root));
     System.out.println(root.dump());
   }
 
diff --git ql/src/test/org/apache/hadoop/hive/ql/parse/TestParseWithinGroupClause.java parser/src/test/org/apache/hadoop/hive/ql/parse/TestParseWithinGroupClause.java
similarity index 94%
rename from ql/src/test/org/apache/hadoop/hive/ql/parse/TestParseWithinGroupClause.java
rename to parser/src/test/org/apache/hadoop/hive/ql/parse/TestParseWithinGroupClause.java
index fb7699e0e4..23dca8d42f 100644
--- ql/src/test/org/apache/hadoop/hive/ql/parse/TestParseWithinGroupClause.java
+++ parser/src/test/org/apache/hadoop/hive/ql/parse/TestParseWithinGroupClause.java
@@ -30,7 +30,8 @@
 
   @Test
   public void testParsePercentileCont() throws Exception {
-    ASTNode tree = parseDriver.parseSelect("SELECT percentile_cont(0.4) WITHIN GROUP (ORDER BY val) FROM src", null);
+    ASTNode tree = parseDriver.parseSelect(
+        "SELECT percentile_cont(0.4) WITHIN GROUP (ORDER BY val) FROM src", null).getTree();
     assertEquals(1, tree.getChildCount());
 
     ASTNode selExprNode = (ASTNode) tree.getChild(0);
@@ -60,7 +61,7 @@ public void testParsePercentileCont() throws Exception {
   @Test
   public void testParseMultipleColumnRefs() throws Exception {
     ASTNode tree = parseDriver.parseSelect(
-        "SELECT rank(3, 4) WITHIN GROUP (ORDER BY val, val2) FROM src", null);
+        "SELECT rank(3, 4) WITHIN GROUP (ORDER BY val, val2) FROM src", null).getTree();
     ASTNode selExprNode = (ASTNode) tree.getChild(0);
     ASTNode functionNode = (ASTNode) selExprNode.getChild(0);
     ASTNode withinGroupNode = (ASTNode) functionNode.getChild(3);
diff --git ql/src/test/org/apache/hadoop/hive/ql/parse/TestSQL11ReservedKeyWordsNegative.java parser/src/test/org/apache/hadoop/hive/ql/parse/TestSQL11ReservedKeyWordsNegative.java
similarity index 87%
rename from ql/src/test/org/apache/hadoop/hive/ql/parse/TestSQL11ReservedKeyWordsNegative.java
rename to parser/src/test/org/apache/hadoop/hive/ql/parse/TestSQL11ReservedKeyWordsNegative.java
index fbf2b8bb9a..6def1c0a21 100644
--- ql/src/test/org/apache/hadoop/hive/ql/parse/TestSQL11ReservedKeyWordsNegative.java
+++ parser/src/test/org/apache/hadoop/hive/ql/parse/TestSQL11ReservedKeyWordsNegative.java
@@ -17,15 +17,11 @@
  */
 package org.apache.hadoop.hive.ql.parse;
 
-import java.io.IOException;
 import java.util.Arrays;
 import java.util.Collection;
 
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.Context;
-import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.conf.Configuration;
 import org.junit.Assert;
-import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
@@ -37,24 +33,16 @@
  */
 @RunWith(Enclosed.class)
 public class TestSQL11ReservedKeyWordsNegative {
-  private static HiveConf conf = new HiveConf(SemanticAnalyzer.class);
+  private static Configuration conf = new Configuration();
   private static ParseDriver pd = new ParseDriver();
 
   private static ASTNode parse(String query) throws ParseException {
     ASTNode nd = null;
-    try {
-      nd = pd.parse(query, new Context(conf));
-    } catch (IOException e) {
-      e.printStackTrace();
-    }
+    nd = pd.parse(query, conf).getTree();
     return (ASTNode) nd.getChild(0);
   }
 
   public static class TestSQL11ReservedKeyWordsNegativeMisc {
-    @BeforeClass
-    public static void initialize() {
-      SessionState.start(conf);
-    }
 
     @Test
     public void testSQL11ReservedKeyWords_KILL() {
@@ -71,10 +59,6 @@ public void testSQL11ReservedKeyWords_KILL() {
 
   @RunWith(Parameterized.class)
   public static class TestSQL11ReservedKeyWordsNegativeParametrized {
-    @BeforeClass
-    public static void initialize() {
-      SessionState.start(conf);
-    }
 
     @Parameters(name = "{0}")
     public static Collection<Object[]> data() {
diff --git ql/src/test/org/apache/hadoop/hive/ql/parse/TestUnpermittedCharsInColumnNameCreateTableNegative.java parser/src/test/org/apache/hadoop/hive/ql/parse/TestUnpermittedCharsInColumnNameCreateTableNegative.java
similarity index 86%
rename from ql/src/test/org/apache/hadoop/hive/ql/parse/TestUnpermittedCharsInColumnNameCreateTableNegative.java
rename to parser/src/test/org/apache/hadoop/hive/ql/parse/TestUnpermittedCharsInColumnNameCreateTableNegative.java
index 02d46dc3cd..f8d6c62512 100644
--- ql/src/test/org/apache/hadoop/hive/ql/parse/TestUnpermittedCharsInColumnNameCreateTableNegative.java
+++ parser/src/test/org/apache/hadoop/hive/ql/parse/TestUnpermittedCharsInColumnNameCreateTableNegative.java
@@ -19,9 +19,7 @@
 
 import java.io.IOException;
 
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.Context;
-import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.conf.Configuration;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.BeforeClass;
@@ -32,14 +30,13 @@
  * information in HIVE-10120
  */
 public class TestUnpermittedCharsInColumnNameCreateTableNegative {
-  private static HiveConf conf;
+  private static Configuration conf;
   private ParseDriver pd;
 
   @BeforeClass
   public static void initialize() {
-    conf = new HiveConf(SemanticAnalyzer.class);
-    SessionState.start(conf);
+    conf = new Configuration();
   }
 
   @Before
@@ -49,11 +46,7 @@ public void setup() throws SemanticException, IOException {
   ASTNode parse(String query) throws ParseException {
     ASTNode nd = null;
-    try {
-      nd = pd.parse(query, new Context(conf));
-    } catch (IOException e) {
-      e.printStackTrace();
-    }
+    nd = pd.parse(query, conf).getTree();
     return (ASTNode) nd.getChild(0);
   }
 
diff --git ql/src/test/org/apache/hadoop/hive/ql/parse/positive/TestTransactionStatement.java parser/src/test/org/apache/hadoop/hive/ql/parse/positive/TestTransactionStatement.java
similarity index 83%
rename from ql/src/test/org/apache/hadoop/hive/ql/parse/positive/TestTransactionStatement.java
rename to parser/src/test/org/apache/hadoop/hive/ql/parse/positive/TestTransactionStatement.java
index b13aa6855a..ff4a6b5b04 100644
--- ql/src/test/org/apache/hadoop/hive/ql/parse/positive/TestTransactionStatement.java
+++ parser/src/test/org/apache/hadoop/hive/ql/parse/positive/TestTransactionStatement.java
@@ -17,47 +17,27 @@
  */
 package org.apache.hadoop.hive.ql.parse.positive;
 
-import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.parse.ParseDriver;
 import org.apache.hadoop.hive.ql.parse.ParseException;
-import org.apache.hadoop.hive.ql.parse.SemanticAnalyzer;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
-import org.apache.hadoop.hive.ql.session.SessionState;
-import org.junit.AfterClass;
 import org.junit.Assert;
 import org.junit.Before;
-import org.junit.BeforeClass;
 import org.junit.Test;
 
-import java.io.IOException;
-
 /**
  * Basic parser tests for multi-statement transactions
  */
 public class TestTransactionStatement {
-  private static SessionState sessionState;
   private ParseDriver pd;
 
-  @BeforeClass
-  public static void initialize() {
-    HiveConf conf = new HiveConf(SemanticAnalyzer.class);
-    sessionState = SessionState.start(conf);
-  }
-  @AfterClass
-  public static void cleanUp() throws IOException {
-    if(sessionState != null) {
-      sessionState.close();
-    }
-  }
-
   @Before
   public void setup() throws SemanticException {
     pd = new ParseDriver();
   }
   ASTNode parse(String query) throws ParseException {
-    ASTNode nd = pd.parse(query);
+    ASTNode nd = pd.parse(query).getTree();
     return (ASTNode) nd.getChild(0);
   }
   @Test
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java
index e89d154b7a..7ae4f708b8 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java
@@ -34,6 +34,7 @@
 import org.antlr.runtime.tree.CommonTree;
 import org.antlr.runtime.tree.Tree;
 import org.apache.calcite.rel.RelNode;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
@@ -77,7 +78,18 @@ public static ASTNode parse(String command, Context ctx) throws ParseException {
   public static ASTNode parse(
       String command, Context ctx, String viewFullyQualifiedName) throws ParseException {
     ParseDriver pd = new ParseDriver();
-    ASTNode tree = pd.parse(command, ctx, viewFullyQualifiedName);
+    Configuration configuration = ctx != null ? ctx.getConf() : null;
+    ParseResult parseResult = pd.parse(command, configuration);
+    if (ctx != null) {
+      if (viewFullyQualifiedName == null) {
+        // Top level query
+        ctx.setTokenRewriteStream(parseResult.getTokenRewriteStream());
+      } else {
+        // It is a view
+        ctx.addViewTokenRewriteStream(viewFullyQualifiedName, parseResult.getTokenRewriteStream());
+      }
+    }
+    ASTNode tree = parseResult.getTree();
     tree = findRootNonNullToken(tree);
     handleSetColRefs(tree);
     return tree;
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index aa8d84ec9c..5ba71a8853 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -14766,7 +14766,7 @@ private Operator genReduceSinkPlanForWindowing(WindowingSpec spec,
     ASTNode selNode = null;
     try {
       ParseDriver pd = new ParseDriver();
-      selNode = pd.parseSelect(selectExprStr, null);
+      selNode = pd.parseSelect(selectExprStr, null).getTree();
     } catch (ParseException pe) {
       throw new SemanticException(pe);
     }
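The ParseUtils change above is also the template for any other ql-side caller that used to hand a Context directly to ParseDriver: request a ParseResult, then register the token stream on the Context yourself. A rough sketch, where command, ctx, and viewFullyQualifiedName stand for whatever the caller already has in scope:

    // Old call, removed by this patch:
    //   ASTNode tree = pd.parse(command, ctx, viewFullyQualifiedName);
    // New equivalent, using only the public ParseResult accessors:
    ParseDriver pd = new ParseDriver();
    ParseResult parseResult = pd.parse(command, ctx != null ? ctx.getConf() : null);
    if (ctx != null) {
      if (viewFullyQualifiedName == null) {
        ctx.setTokenRewriteStream(parseResult.getTokenRewriteStream());      // top-level query
      } else {
        ctx.addViewTokenRewriteStream(viewFullyQualifiedName,
            parseResult.getTokenRewriteStream());                            // referenced view
      }
    }
    ASTNode tree = parseResult.getTree();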
diff --git ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBCompact.java ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBCompact.java
index e564525075..d198830e9a 100644
--- ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBCompact.java
+++ ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBCompact.java
@@ -77,7 +77,7 @@ public static void deInit() throws Exception {
 
   private void parseAndAnalyze(String query) throws Exception {
     ParseDriver hd = new ParseDriver();
-    ASTNode head = (ASTNode)hd.parse(query).getChild(0);
+    ASTNode head = (ASTNode)hd.parse(query).getTree().getChild(0);
     BaseSemanticAnalyzer a = SemanticAnalyzerFactory.get(queryState, head);
     a.analyze(head, new Context(conf));
     List<Task<?>> roots = a.getRootTasks();
@@ -86,7 +86,7 @@ private void parseAndAnalyze(String query) throws Exception {
 
   private AlterTableCompactDesc parseAndAnalyzeAlterTable(String query) throws Exception {
     ParseDriver hd = new ParseDriver();
-    ASTNode head = (ASTNode)hd.parse(query).getChild(0);
+    ASTNode head = (ASTNode)hd.parse(query).getTree().getChild(0);
     BaseSemanticAnalyzer a = SemanticAnalyzerFactory.get(queryState, head);
     a.analyze(head, new Context(conf));
     List<Task<?>> roots = a.getRootTasks();
diff --git ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBSubQuery.java ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBSubQuery.java
index 5a0898c5da..5749fb2990 100644
--- ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBSubQuery.java
+++ ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBSubQuery.java
@@ -61,7 +61,7 @@ public void setup() throws SemanticException {
   }
 
   ASTNode parse(String query) throws ParseException {
-    ASTNode nd = pd.parse(query);
+    ASTNode nd = pd.parse(query).getTree();
     return (ASTNode) nd.getChild(0);
   }
 
diff --git ql/src/test/org/apache/hadoop/hive/ql/parse/TestReplicationSemanticAnalyzer.java ql/src/test/org/apache/hadoop/hive/ql/parse/TestReplicationSemanticAnalyzer.java
index e52216cdcc..e91f42d438 100644
--- ql/src/test/org/apache/hadoop/hive/ql/parse/TestReplicationSemanticAnalyzer.java
+++ ql/src/test/org/apache/hadoop/hive/ql/parse/TestReplicationSemanticAnalyzer.java
@@ -41,8 +41,7 @@ public static HiveConf buildHiveConf() {
 
   private static ASTNode parse(String command) throws Exception {
     SessionState.start(hiveConf);
-    Context context = new Context(hiveConf);
-    return (ASTNode) driver.parse(command, context).getChild(0);
+    return (ASTNode) driver.parse(command, hiveConf).getTree().getChild(0);
   }
 
   private static void assertWithClause(ASTNode root, int replConfigIndex) {