Index: ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java	(revision 927179)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java	(working copy)
@@ -336,18 +336,26 @@
     return null;
   }
 
+  protected List getColumns(ASTNode ast) throws SemanticException {
+    return getColumns(ast, true);
+  }
+
   /**
    * Get the list of FieldSchema out of the ASTNode.
    */
-  protected List getColumns(ASTNode ast) throws SemanticException {
+  protected List getColumns(ASTNode ast, boolean lowerCase) throws SemanticException {
     List colList = new ArrayList();
     int numCh = ast.getChildCount();
     for (int i = 0; i < numCh; i++) {
       FieldSchema col = new FieldSchema();
       ASTNode child = (ASTNode) ast.getChild(i);
+      String name = child.getChild(0).getText();
+      if(lowerCase) {
+        name = name.toLowerCase();
+      }
 
       // child 0 is the name of the column
-      col.setName(unescapeIdentifier(child.getChild(0).getText()));
+      col.setName(unescapeIdentifier(name));
       // child 1 is the type of the column
       ASTNode typeChild = (ASTNode) (child.getChild(1));
       col.setType(getTypeStringFromAST(typeChild));
@@ -366,7 +374,7 @@
     int numCh = ast.getChildCount();
     for (int i = 0; i < numCh; i++) {
       ASTNode child = (ASTNode) ast.getChild(i);
-      colList.add(unescapeIdentifier(child.getText()));
+      colList.add(unescapeIdentifier(child.getText()).toLowerCase());
     }
     return colList;
   }
@@ -377,10 +385,10 @@
     for (int i = 0; i < numCh; i++) {
       ASTNode child = (ASTNode) ast.getChild(i);
       if (child.getToken().getType() == HiveParser.TOK_TABSORTCOLNAMEASC) {
-        colList.add(new Order(unescapeIdentifier(child.getChild(0).getText()),
+        colList.add(new Order(unescapeIdentifier(child.getChild(0).getText()).toLowerCase(),
             HIVE_COLUMN_ORDER_ASC));
       } else {
-        colList.add(new Order(unescapeIdentifier(child.getChild(0).getText()),
+        colList.add(new Order(unescapeIdentifier(child.getChild(0).getText()).toLowerCase(),
             HIVE_COLUMN_ORDER_DESC));
       }
     }
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java	(revision 927179)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java	(working copy)
@@ -6323,7 +6323,7 @@
         comment = unescapeSQLString(child.getChild(0).getText());
         break;
       case HiveParser.TOK_TABLEPARTCOLS:
-        partCols = getColumns((ASTNode) child.getChild(0));
+        partCols = getColumns((ASTNode) child.getChild(0), false);
         break;
       case HiveParser.TOK_TABLEBUCKETS:
         bucketCols = getColumnNames((ASTNode) child.getChild(0));
Index: ql/src/test/queries/clientpositive/ct_case_insensitive.q
===================================================================
--- ql/src/test/queries/clientpositive/ct_case_insensitive.q	(revision 0)
+++ ql/src/test/queries/clientpositive/ct_case_insensitive.q	(revision 0)
@@ -0,0 +1,5 @@
+DROP TABLE tmp_pyang_bucket3;
+CREATE TABLE tmp_pyang_bucket3 (userId INT) CLUSTERED BY (userid) INTO 32 BUCKETS;
+DROP TABLE tmp_pyang_bucket3;
+CREATE TABLE tmp_pyang_bucket3 (userId INT) CLUSTERED BY (userid) SORTED BY (USERID) INTO 32 BUCKETS;
+DROP TABLE tmp_pyang_bucket3;
Index: ql/src/test/results/clientpositive/ct_case_insensitive.q.out
===================================================================
--- ql/src/test/results/clientpositive/ct_case_insensitive.q.out	(revision 0)
+++ ql/src/test/results/clientpositive/ct_case_insensitive.q.out	(revision 0)
@@ -0,0 +1,24 @@
+PREHOOK: query: DROP TABLE tmp_pyang_bucket3
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE tmp_pyang_bucket3
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE tmp_pyang_bucket3 (userId INT) CLUSTERED BY (userid) INTO 32 BUCKETS
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE tmp_pyang_bucket3 (userId INT) CLUSTERED BY (userid) INTO 32 BUCKETS
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@tmp_pyang_bucket3
+PREHOOK: query: DROP TABLE tmp_pyang_bucket3
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE tmp_pyang_bucket3
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Output: default@tmp_pyang_bucket3
+PREHOOK: query: CREATE TABLE tmp_pyang_bucket3 (userId INT) CLUSTERED BY (userid) SORTED BY (USERID) INTO 32 BUCKETS
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE tmp_pyang_bucket3 (userId INT) CLUSTERED BY (userid) SORTED BY (USERID) INTO 32 BUCKETS
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@tmp_pyang_bucket3
+PREHOOK: query: DROP TABLE tmp_pyang_bucket3
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE tmp_pyang_bucket3
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Output: default@tmp_pyang_bucket3