diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java b/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
index 6ed5b13..29936d9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
@@ -451,6 +451,7 @@
   INVALID_PK_SYNTAX(10326, "Invalid Primary Key syntax"),
   ACID_NOT_ENOUGH_HISTORY(10327, "Not enough history available for ({0},{1}). "
       + "Oldest available base: {2}", true),
+  INVALID_COLUMN_NAME(10328, "Invalid column name"),
   //========================== 20000 range starts here ========================//
   SCRIPT_INIT_ERROR(20000, "Unable to initialize custom script."),
   SCRIPT_IO_ERROR(20001, "An error occurred while reading or writing to your custom script. "
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
index db7aeef..e0e9b12 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
@@ -58,11 +58,7 @@
 import org.apache.hadoop.hive.ql.hooks.ReadEntity;
 import org.apache.hadoop.hive.ql.hooks.WriteEntity;
 import org.apache.hadoop.hive.ql.lib.Node;
-import org.apache.hadoop.hive.ql.metadata.Hive;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.metadata.InvalidTableException;
-import org.apache.hadoop.hive.ql.metadata.Partition;
-import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.metadata.*;
 import org.apache.hadoop.hive.ql.optimizer.listbucketingpruner.ListBucketingPrunerUtils;
 import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
@@ -670,6 +666,7 @@ private static void processPrimaryKeyInfos(
         throw new SemanticException(
             ErrorMsg.INVALID_PK_SYNTAX.getMsg(" VALIDATE feature not supported yet"));
       }
+      checkColumnName(grandChild.getText());
       pkInfos.add(
           new PKInfo(
               unescapeIdentifier(grandChild.getText().toLowerCase()),
@@ -783,6 +780,7 @@ protected static void processForeignKeys(
       for (int j = 0; j < child.getChild(fkIndex).getChildCount(); j++) {
         SQLForeignKey sqlForeignKey = new SQLForeignKey();
         Tree fkgrandChild = child.getChild(fkIndex).getChild(j);
+        checkColumnName(fkgrandChild.getText());
         boolean rely = child.getChild(relyIndex).getType() == HiveParser.TOK_VALIDATE;
         boolean enable = child.getChild(relyIndex+1).getType() == HiveParser.TOK_ENABLE;
         boolean validate = child.getChild(relyIndex+2).getType() == HiveParser.TOK_VALIDATE;
@@ -810,6 +808,12 @@ protected static void processForeignKeys(
     }
   }
 
+  private static void checkColumnName(String columnName) throws SemanticException {
+    if (VirtualColumn.VIRTUAL_COLUMN_NAMES.contains(columnName.toUpperCase())) {
+      throw new SemanticException(ErrorMsg.INVALID_COLUMN_NAME.getMsg(columnName));
+    }
+  }
+
   /**
    * Get the list of FieldSchema out of the ASTNode.
    * Additionally, populate the primaryKeys and foreignKeys if any.
@@ -837,6 +841,7 @@ protected static void processForeignKeys(
       if(lowerCase) {
         name = name.toLowerCase();
       }
+      checkColumnName(name);
       // child 0 is the name of the column
       col.setName(unescapeIdentifier(name));
       // child 1 is the type of the column
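
For context: every user-declared column name in the touched paths (plain table columns, primary-key columns in processPrimaryKeyInfos, foreign-key columns in processForeignKeys) is now funneled through the new checkColumnName guard, which rejects names that collide with Hive's built-in virtual columns. Below is a minimal standalone sketch of that check, assuming the name set mirrors VirtualColumn.VIRTUAL_COLUMN_NAMES and with IllegalArgumentException standing in for SemanticException so it compiles without Hive on the classpath:

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

// Sketch only: the real guard lives in BaseSemanticAnalyzer and consults
// ql.metadata.VirtualColumn.VIRTUAL_COLUMN_NAMES; the names below are the
// virtual columns Hive defines, copied here for self-containment.
public class CheckColumnNameSketch {

  private static final Set<String> VIRTUAL_COLUMN_NAMES = new HashSet<>(Arrays.asList(
      "INPUT__FILE__NAME", "BLOCK__OFFSET__INSIDE__FILE", "ROW__OFFSET__INSIDE__BLOCK",
      "RAW__DATA__SIZE", "GROUPING__ID", "ROW__ID"));

  // Upper-case the candidate before the lookup so the check is case-insensitive,
  // matching the patch's columnName.toUpperCase() call against the stored names.
  static void checkColumnName(String columnName) {
    if (VIRTUAL_COLUMN_NAMES.contains(columnName.toUpperCase())) {
      throw new IllegalArgumentException("Invalid column name " + columnName);
    }
  }

  public static void main(String[] args) {
    checkColumnName("id");        // accepted
    checkColumnName("Row__Id");   // rejected: collides with the ROW__ID virtual column
  }
}

With the guard in place, DDL such as CREATE TABLE t (ROW__ID int) fails at semantic-analysis time with error 10328 (INVALID_COLUMN_NAME) instead of creating a column that shadows the virtual one.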