diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java
index a68a5c3..4ddf5b9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java
@@ -40,78 +40,78 @@ public enum ErrorMsg {
   GENERIC_ERROR("Exception while processing"),
   INVALID_TABLE("Table not found", "42S02"),
-  INVALID_COLUMN("Invalid Column Reference"),
-  INVALID_INDEX("Invalid Index"),
-  INVALID_TABLE_OR_COLUMN("Invalid Table Alias or Column Reference"),
-  AMBIGUOUS_TABLE_OR_COLUMN("Ambiguous Table Alias or Column Reference"),
+  INVALID_COLUMN("Invalid column reference"),
+  INVALID_INDEX("Invalid index"),
+  INVALID_TABLE_OR_COLUMN("Invalid table alias or column reference"),
+  AMBIGUOUS_TABLE_OR_COLUMN("Ambiguous table alias or column reference"),
   INVALID_PARTITION("Partition not found"),
-  AMBIGUOUS_COLUMN("Ambiguous Column Reference"),
-  AMBIGUOUS_TABLE_ALIAS("Ambiguous Table Alias"),
-  INVALID_TABLE_ALIAS("Invalid Table Alias"),
-  NO_TABLE_ALIAS("No Table Alias"),
-  INVALID_FUNCTION("Invalid Function"),
-  INVALID_FUNCTION_SIGNATURE("Function Argument Type Mismatch"),
-  INVALID_OPERATOR_SIGNATURE("Operator Argument Type Mismatch"),
-  INVALID_ARGUMENT("Wrong Arguments"),
-  INVALID_ARGUMENT_LENGTH("Arguments Length Mismatch", "21000"),
-  INVALID_ARGUMENT_TYPE("Argument Type Mismatch"),
-  INVALID_JOIN_CONDITION_1("Both Left and Right Aliases Encountered in Join"),
-  INVALID_JOIN_CONDITION_2("Neither Left nor Right Aliases Encountered in Join"),
-  INVALID_JOIN_CONDITION_3("OR not supported in Join currently"),
-  INVALID_TRANSFORM("TRANSFORM with Other Select Columns not Supported"),
-  DUPLICATE_GROUPBY_KEY("Repeated Key in Group By"),
-  UNSUPPORTED_MULTIPLE_DISTINCTS("DISTINCT on Different Columns not Supported with skew in data"),
-  NO_SUBQUERY_ALIAS("No Alias For Subquery"),
-  NO_INSERT_INSUBQUERY("Cannot insert in a Subquery. Inserting to table "),
-  NON_KEY_EXPR_IN_GROUPBY("Expression Not In Group By Key"),
-  INVALID_XPATH("General . and [] Operators are Not Supported"),
-  INVALID_PATH("Invalid Path"), ILLEGAL_PATH("Path is not legal"),
-  INVALID_NUMERICAL_CONSTANT("Invalid Numerical Constant"),
-  INVALID_ARRAYINDEX_CONSTANT("Non Constant Expressions for Array Indexes not Supported"),
-  INVALID_MAPINDEX_CONSTANT("Non Constant Expression for Map Indexes not Supported"),
-  INVALID_MAPINDEX_TYPE("Map Key Type does not Match Index Expression Type"),
-  NON_COLLECTION_TYPE("[] not Valid on Non Collection Types"),
+  AMBIGUOUS_COLUMN("Ambiguous column reference"),
+  AMBIGUOUS_TABLE_ALIAS("Ambiguous table alias"),
+  INVALID_TABLE_ALIAS("Invalid table alias"),
+  NO_TABLE_ALIAS("No table alias"),
+  INVALID_FUNCTION("Invalid function"),
+  INVALID_FUNCTION_SIGNATURE("Function argument type mismatch"),
+  INVALID_OPERATOR_SIGNATURE("Operator argument type mismatch"),
+  INVALID_ARGUMENT("Wrong arguments"),
+  INVALID_ARGUMENT_LENGTH("Arguments length mismatch", "21000"),
+  INVALID_ARGUMENT_TYPE("Argument type mismatch"),
+  INVALID_JOIN_CONDITION_1("Both left and right aliases encountered in JOIN"),
+  INVALID_JOIN_CONDITION_2("Neither left nor right aliases encountered in JOIN"),
+  INVALID_JOIN_CONDITION_3("OR not supported in JOIN currently"),
+  INVALID_TRANSFORM("TRANSFORM with other SELECT columns not supported"),
+  DUPLICATE_GROUPBY_KEY("Repeated key in GROUP BY"),
+  UNSUPPORTED_MULTIPLE_DISTINCTS("DISTINCT on different columns not supported with skew in data"),
+  NO_SUBQUERY_ALIAS("No alias for subquery"),
+  NO_INSERT_INSUBQUERY("Cannot insert in a subquery. Inserting to table "),
+  NON_KEY_EXPR_IN_GROUPBY("Expression not in GROUP BY key"),
+  INVALID_XPATH("General . and [] operators are not supported"),
+  INVALID_PATH("Invalid path"), ILLEGAL_PATH("Path is not legal"),
+  INVALID_NUMERICAL_CONSTANT("Invalid numerical constant"),
+  INVALID_ARRAYINDEX_CONSTANT("Non-constant expressions for array indexes not supported"),
+  INVALID_MAPINDEX_CONSTANT("Non-constant expression for map indexes not supported"),
+  INVALID_MAPINDEX_TYPE("MAP key type does not match index expression type"),
+  NON_COLLECTION_TYPE("[] not valid on non-collection types"),
   SELECT_DISTINCT_WITH_GROUPBY("SELECT DISTINCT and GROUP BY can not be in the same query"),
   COLUMN_REPEATED_IN_PARTITIONING_COLS("Column repeated in partitioning columns"),
   DUPLICATE_COLUMN_NAMES("Duplicate column name:"),
   INVALID_BUCKET_NUMBER("Bucket number should be bigger than zero"),
-  COLUMN_REPEATED_IN_CLUSTER_SORT("Same column cannot appear in cluster and sort by"),
-  SAMPLE_RESTRICTION("Cannot Sample on More Than Two Columns"),
-  SAMPLE_COLUMN_NOT_FOUND("Sample Column Not Found"),
-  NO_PARTITION_PREDICATE("No Partition Predicate Found"),
-  INVALID_DOT(". operator is only supported on struct or list of struct types"),
+  COLUMN_REPEATED_IN_CLUSTER_SORT("Same column cannot appear in CLUSTER BY and SORT BY"),
+  SAMPLE_RESTRICTION("Cannot SAMPLE on more than two columns"),
+  SAMPLE_COLUMN_NOT_FOUND("SAMPLE column not found"),
+  NO_PARTITION_PREDICATE("No partition predicate found"),
+  INVALID_DOT(". Operator is only supported on struct or list of struct types"),
   INVALID_TBL_DDL_SERDE("Either list of columns or a custom serializer should be specified"),
   TARGET_TABLE_COLUMN_MISMATCH(
       "Cannot insert into target table because column number/types are different"),
-  TABLE_ALIAS_NOT_ALLOWED("Table Alias not Allowed in Sampling Clause"),
-  CLUSTERBY_DISTRIBUTEBY_CONFLICT("Cannot have both Cluster By and Distribute By Clauses"),
-  ORDERBY_DISTRIBUTEBY_CONFLICT("Cannot have both Order By and Distribute By Clauses"),
-  CLUSTERBY_SORTBY_CONFLICT("Cannot have both Cluster By and Sort By Clauses"),
-  ORDERBY_SORTBY_CONFLICT("Cannot have both Order By and Sort By Clauses"),
-  CLUSTERBY_ORDERBY_CONFLICT("Cannot have both Cluster By and Order By Clauses"),
-  NO_LIMIT_WITH_ORDERBY("In strict mode, limit must be specified if ORDER BY is present"),
+  TABLE_ALIAS_NOT_ALLOWED("Table alias not allowed in sampling clause"),
+  CLUSTERBY_DISTRIBUTEBY_CONFLICT("Cannot have both CLUSTER BY and DISTRIBUTE BY clauses"),
+  ORDERBY_DISTRIBUTEBY_CONFLICT("Cannot have both ORDER BY and DISTRIBUTE BY clauses"),
+  CLUSTERBY_SORTBY_CONFLICT("Cannot have both CLUSTER BY and SORT BY clauses"),
+  ORDERBY_SORTBY_CONFLICT("Cannot have both ORDER BY and SORT BY clauses"),
+  CLUSTERBY_ORDERBY_CONFLICT("Cannot have both CLUSTER BY and ORDER BY clauses"),
+  NO_LIMIT_WITH_ORDERBY("In strict mode, if ORDER BY is specified, LIMIT must also be specified"),
   NO_CARTESIAN_PRODUCT("In strict mode, cartesian product is not allowed. "
       + "If you really want to perform the operation, set hive.mapred.mode=nonstrict"),
-  UNION_NOTIN_SUBQ("Top level Union is not supported currently; use a subquery for the union"),
-  INVALID_INPUT_FORMAT_TYPE("Input Format must implement InputFormat"),
+  UNION_NOTIN_SUBQ("Top level UNION is not supported currently; use a subquery for the UNION"),
+  INVALID_INPUT_FORMAT_TYPE("Input format must implement InputFormat"),
   INVALID_OUTPUT_FORMAT_TYPE("Output Format must implement HiveOutputFormat, "
       + "otherwise it should be either IgnoreKeyTextOutputFormat or SequenceFileOutputFormat"),
   NO_VALID_PARTN("The query does not reference any valid partition. "
       + "To run this query, set hive.mapred.mode=nonstrict"),
-  NO_OUTER_MAPJOIN("Map Join cannot be performed with Outer join"),
-  INVALID_MAPJOIN_HINT("neither table specified as map-table"),
-  INVALID_MAPJOIN_TABLE("result of a union cannot be a map table"),
-  NON_BUCKETED_TABLE("Sampling Expression Needed for Non-Bucketed Table"),
+  NO_OUTER_MAPJOIN("MAPJOIN cannot be performed with OUTER JOIN"),
+  INVALID_MAPJOIN_HINT("Neither table specified as map-table"),
+  INVALID_MAPJOIN_TABLE("Result of a union cannot be a map table"),
+  NON_BUCKETED_TABLE("Sampling expression needed for non-bucketed table"),
   BUCKETED_NUMBERATOR_BIGGER_DENOMINATOR("Numberator should not be bigger than "
-      + "denaminator in sample clause for Table"),
-  NEED_PARTITION_ERROR("need to specify partition columns because the destination "
+      + "denaminator in sample clause for table"),
+  NEED_PARTITION_ERROR("Need to specify partition columns because the destination "
       + "table is partitioned."),
   CTAS_CTLT_COEXISTENCE("Create table command does not allow LIKE and AS-SELECT in "
       + "the same command"),
   LINES_TERMINATED_BY_NON_NEWLINE("LINES TERMINATED BY only supports newline '\\n' right now"),
-  CTAS_COLLST_COEXISTENCE("Create table as select command cannot specify the list of columns "
+  CTAS_COLLST_COEXISTENCE("CREATE TABLE AS SELECT command cannot specify the list of columns "
       + "for the target table."),
-  CTLT_COLLST_COEXISTENCE("Create table like command cannot specify the list of columns for "
+  CTLT_COLLST_COEXISTENCE("CREATE TABLE LIKE command cannot specify the list of columns for "
       + "the target table."),
   INVALID_SELECT_SCHEMA("Cannot derive schema from the select-clause."),
   CTAS_PARCOL_COEXISTENCE("CREATE-TABLE-AS-SELECT does not support partitioning in the target "
@@ -132,8 +132,8 @@ public enum ErrorMsg {
   UDTF_ALIAS_MISMATCH("The number of aliases supplied in the AS clause does not match the "
       + "number of columns output by the UDTF"),
   UDF_STATEFUL_INVALID_LOCATION("Stateful UDF's can only be invoked in the SELECT list"),
-  LATERAL_VIEW_WITH_JOIN("Join with a lateral view is not supported"),
-  LATERAL_VIEW_INVALID_CHILD("Lateral view AST with invalid child"),
+  LATERAL_VIEW_WITH_JOIN("JOIN with a LATERAL VIEW is not supported"),
+  LATERAL_VIEW_INVALID_CHILD("LATERAL VIEW AST with invalid child"),
   OUTPUT_SPECIFIED_MULTIPLE_TIMES("The same output cannot be present multiple times: "),
   INVALID_AS("AS clause has an invalid number of aliases"),
   VIEW_COL_MISMATCH("The number of columns produced by the SELECT clause does not match the "
@@ -155,9 +155,9 @@ public enum ErrorMsg {
       + "STRING instead."),
   CREATE_NON_NATIVE_AS("CREATE TABLE AS SELECT cannot be used for a non-native table"),
   LOAD_INTO_NON_NATIVE("A non-native table cannot be used as target for LOAD"),
-  LOCKMGR_NOT_SPECIFIED("lock manager not specified correctly, set hive.lock.manager"),
-  LOCKMGR_NOT_INITIALIZED("lock manager could not be initialized, check hive.lock.manager "),
-  LOCK_CANNOT_BE_ACQUIRED("locks on the underlying objects cannot be acquired. retry after some time"),
+  LOCKMGR_NOT_SPECIFIED("Lock manager not specified correctly, set hive.lock.manager"),
+  LOCKMGR_NOT_INITIALIZED("Lock manager could not be initialized, check hive.lock.manager "),
+  LOCK_CANNOT_BE_ACQUIRED("Locks on the underlying objects cannot be acquired. retry after some time"),
   ZOOKEEPER_CLIENT_COULD_NOT_BE_INITIALIZED("Check hive.zookeeper.quorum and hive.zookeeper.client.port"),
   OVERWRITE_ARCHIVED_PART("Cannot overwrite an archived partition. "
" + "Unarchive before running this command."), @@ -328,7 +328,7 @@ public enum ErrorMsg { } private static void renderPosition(StringBuilder sb, ASTNode tree) { - sb.append("line "); + sb.append("Line "); sb.append(getLine(tree)); sb.append(":"); sb.append(getCharPositionInLine(tree)); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java index d738c48..26fc6d0 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java @@ -25,6 +25,7 @@ import java.util.HashMap; import org.antlr.runtime.ANTLRStringStream; import org.antlr.runtime.BitSet; import org.antlr.runtime.CharStream; +import org.antlr.runtime.FailedPredicateException; import org.antlr.runtime.IntStream; import org.antlr.runtime.MismatchedTokenException; import org.antlr.runtime.NoViableAltException; @@ -190,6 +191,7 @@ public class ParseDriver { xlateMap.put("TILDE", "~"); xlateMap.put("BITWISEOR", "|"); xlateMap.put("BITWISEXOR", "^"); + xlateMap.put("CharSetLiteral", "\\'"); } public static Collection getKeywords() { @@ -208,7 +210,7 @@ public class ParseDriver { /** * ANTLRNoCaseStringStream. - * + * */ //This class provides and implementation for a case insensitive token checker //for the lexical analysis part of antlr. By converting the token stream into @@ -227,6 +229,7 @@ public class ParseDriver { super(input); } + @Override public int LA(int i) { int returnChar = super.LA(i); @@ -258,12 +261,14 @@ public class ParseDriver { errors = new ArrayList(); } + @Override public void displayRecognitionError(String[] tokenNames, RecognitionException e) { errors.add(new ParseError(this, e, tokenNames)); } + @Override public String getErrorMessage(RecognitionException e, String[] tokenNames) { String msg = null; @@ -301,27 +306,45 @@ public class ParseDriver { errors = new ArrayList(); } + @Override protected void mismatch(IntStream input, int ttype, BitSet follow) throws RecognitionException { throw new MismatchedTokenException(ttype, input); } + @Override public void recoverFromMismatchedSet(IntStream input, RecognitionException re, BitSet follow) throws RecognitionException { throw re; } + @Override public void displayRecognitionError(String[] tokenNames, RecognitionException e) { errors.add(new ParseError(this, e, tokenNames)); } + @Override + public String getErrorHeader(RecognitionException e) { + String header = null; + if (e.charPositionInLine < 0 && input.LT(-1) != null) { + Token t = input.LT(-1); + header = "line " + t.getLine() + ":" + t.getCharPositionInLine(); + } else { + header = super.getErrorHeader(e); + } + + return header; + } + + + @Override public String getErrorMessage(RecognitionException e, String[] tokenNames) { String msg = null; - // Transalate the token names to something that the user can understand + // Translate the token names to something that the user can understand String[] xlateNames = new String[tokenNames.length]; for (int i = 0; i < tokenNames.length; ++i) { xlateNames[i] = ParseDriver.xlate(tokenNames[i]); @@ -334,7 +357,16 @@ public class ParseDriver { // "decision=<<"+nvae.grammarDecisionDescription+">>" // and "(decision="+nvae.decisionNumber+") and // "state "+nvae.stateNumber - msg = "cannot recognize input " + getTokenErrorDisplay(e.token); + msg = "cannot recognize input near " + + getTokenErrorDisplay(e.token) + + (input.LT(2) != null ? " " + getTokenErrorDisplay(input.LT(2)) : "") + + (input.LT(3) != null ? 
" " + getTokenErrorDisplay(input.LT(3)) : ""); + } else if (e instanceof MismatchedTokenException) { + MismatchedTokenException mte = (MismatchedTokenException) e; + msg = super.getErrorMessage(e, xlateNames) + (input.LT(-1) == null ? "":" near '" + input.LT(-1).getText()) + "'"; + } else if (e instanceof FailedPredicateException) { + FailedPredicateException fpe = (FailedPredicateException) e; + msg = "Failed to recognize predicate '" + fpe.token.getText() + "'. Failed rule: '" + fpe.ruleName + "'"; } else { msg = super.getErrorMessage(e, xlateNames); } @@ -360,7 +392,7 @@ public class ParseDriver { /** * Creates an ASTNode for the given token. The ASTNode is a wrapper around * antlr's CommonTree class that implements the Node interface. - * + * * @param payload * The token. * @return Object (which is actually an ASTNode) for the token. @@ -378,15 +410,15 @@ public class ParseDriver { /** * Parses a command, optionally assigning the parser's token stream to the * given context. - * + * * @param command * command to parse - * + * * @param ctx * context with which to associate this parser's token stream, or * null if either no context is available or the context already has * an existing stream - * + * * @return parsed AST */ public ASTNode parse(String command, Context ctx) throws ParseException { -- 1.7.4.4