diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java
index f707451..48c0a4a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java
@@ -41,7 +41,7 @@
  */
 public class ParseDriver {
 
-  private static final Logger LOG = LoggerFactory.getLogger("hive.ql.parse.ParseDriver");
+  private static final Logger LOG = LoggerFactory.getLogger(ParseDriver.class);
 
   /**
    * ANTLRNoCaseStringStream.
@@ -229,7 +229,6 @@ public ASTNode parse(String command, Context ctx, String viewFullyQualifiedName)
     try {
       r = parser.statement();
     } catch (RecognitionException e) {
-      e.printStackTrace();
       throw new ParseException(parser.errors);
     }
 
@@ -250,7 +249,7 @@ public ASTNode parse(String command, Context ctx, String viewFullyQualifiedName)
    * Parse a string as a query hint.
    */
   public ASTNode parseHint(String command) throws ParseException {
-    LOG.info("Parsing hint: " + command);
+    LOG.debug("Parsing hint: {}", command);
 
     HiveLexerX lexer = new HiveLexerX(new ANTLRNoCaseStringStream(command));
     TokenRewriteStream tokens = new TokenRewriteStream(lexer);
@@ -260,12 +259,11 @@ public ASTNode parseHint(String command) throws ParseException {
     try {
       r = parser.hint();
     } catch (RecognitionException e) {
-      e.printStackTrace();
       throw new ParseException(parser.errors);
     }
 
     if (lexer.getErrors().size() == 0 && parser.errors.size() == 0) {
-      LOG.info("Parse Completed");
+      LOG.debug("Parse Completed");
     } else if (lexer.getErrors().size() != 0) {
       throw new ParseException(lexer.getErrors());
     } else {
@@ -286,9 +284,7 @@ public ASTNode parseHint(String command) throws ParseException {
    * translation process.
    */
   public ASTNode parseSelect(String command, Context ctx) throws ParseException {
-    if (LOG.isDebugEnabled()) {
-      LOG.debug("Parsing command: " + command);
-    }
+    LOG.debug("Parsing command: {}", command);
 
     HiveLexerX lexer = new HiveLexerX(new ANTLRNoCaseStringStream(command));
     TokenRewriteStream tokens = new TokenRewriteStream(lexer);
@@ -301,7 +297,6 @@ public ASTNode parseSelect(String command, Context ctx) throws ParseException {
     try {
       r = parser.selectClause();
     } catch (RecognitionException e) {
-      e.printStackTrace();
       throw new ParseException(parser.errors);
     }
 
@@ -316,7 +311,7 @@ public ASTNode parseSelect(String command, Context ctx) throws ParseException {
     return r.getTree();
   }
   public ASTNode parseExpression(String command) throws ParseException {
-    LOG.info("Parsing expression: " + command);
+    LOG.debug("Parsing expression: {}", command);
 
     HiveLexerX lexer = new HiveLexerX(new ANTLRNoCaseStringStream(command));
     TokenRewriteStream tokens = new TokenRewriteStream(lexer);
@@ -326,12 +321,11 @@ public ASTNode parseExpression(String command) throws ParseException {
     try {
       r = parser.expression();
     } catch (RecognitionException e) {
-      e.printStackTrace();
       throw new ParseException(parser.errors);
     }
 
     if (lexer.getErrors().size() == 0 && parser.errors.size() == 0) {
-      LOG.info("Parse Completed");
+      LOG.debug("Parse Completed");
     } else if (lexer.getErrors().size() != 0) {
       throw new ParseException(lexer.getErrors());
     } else {
@@ -350,7 +344,6 @@ public ASTNode parseTriggerExpression(String command) throws ParseException {
     try {
       r = parser.gResourcePlanParser.triggerExpressionStandalone();
     } catch (RecognitionException e) {
-      e.printStackTrace();
       throw new ParseException(parser.errors);
     }
     if (lexer.getErrors().size() != 0) {
@@ -371,7 +364,6 @@ public ASTNode parseTriggerActionExpression(String command) throws ParseException {
     try {
       r = parser.gResourcePlanParser.triggerActionExpressionStandalone();
     } catch (RecognitionException e) {
-      e.printStackTrace();
       throw new ParseException(parser.errors);
     }
     if (lexer.getErrors().size() != 0) {