Index: odbc/src/cpp/hiveclienthelper.cpp =================================================================== --- odbc/src/cpp/hiveclienthelper.cpp (revision 832763) +++ odbc/src/cpp/hiveclienthelper.cpp (working copy) @@ -17,6 +17,7 @@ */ #include +#include #include "hiveclienthelper.h" #include "thriftserverconstants.h" Index: ql/src/test/results/clientpositive/ctas.q.out =================================================================== --- ql/src/test/results/clientpositive/ctas.q.out (revision 832763) +++ ql/src/test/results/clientpositive/ctas.q.out (working copy) @@ -18,12 +18,12 @@ PREHOOK: type: DROPTABLE POSTHOOK: query: drop table nzhang_ctas5 POSTHOOK: type: DROPTABLE -PREHOOK: query: explain create table nzhang_ctas1 as select key k, value from src sort by k, value limit 10 +PREHOOK: query: explain create table nzhang_CTAS1 as select key k, value from src sort by k, value limit 10 PREHOOK: type: CREATETABLE -POSTHOOK: query: explain create table nzhang_ctas1 as select key k, value from src sort by k, value limit 10 +POSTHOOK: query: explain create table nzhang_CTAS1 as select key k, value from src sort by k, value limit 10 POSTHOOK: type: CREATETABLE ABSTRACT SYNTAX TREE: - (TOK_CREATETABLE nzhang_ctas1 TOK_LIKETABLE (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key) k) (TOK_SELEXPR (TOK_TABLE_OR_COL value))) (TOK_SORTBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL k)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL value))) (TOK_LIMIT 10)))) + (TOK_CREATETABLE nzhang_CTAS1 TOK_LIKETABLE (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key) k) (TOK_SELEXPR (TOK_TABLE_OR_COL value))) (TOK_SORTBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL k)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL value))) (TOK_LIMIT 10)))) STAGE DEPENDENCIES: Stage-1 is a root stage @@ -71,7 +71,7 @@ Stage: Stage-2 Map Reduce 
Alias -> Map Operator Tree: - file:/data/users/nzhang/work/876/apache-hive/build/ql/tmp/551361482/10002 + file:/Users/nzhang/work/apache-hive-trunk/build/ql/tmp/1224170821/10002 Reduce Output Operator key expressions: expr: _col0 @@ -99,7 +99,7 @@ Move Operator files: hdfs directory: true - destination: file:///data/users/nzhang/work/876/apache-hive/ql/../build/ql/test/data/warehouse/nzhang_ctas1 + destination: file:///Users/nzhang/work/apache-hive-trunk/ql/../build/ql/test/data/warehouse/nzhang_ctas1 Stage: Stage-3 Create Table Operator: @@ -113,21 +113,21 @@ isExternal: false -PREHOOK: query: create table nzhang_ctas1 as select key k, value from src sort by k, value limit 10 +PREHOOK: query: create table nzhang_CTAS1 as select key k, value from src sort by k, value limit 10 PREHOOK: type: CREATETABLE PREHOOK: Input: default@src -POSTHOOK: query: create table nzhang_ctas1 as select key k, value from src sort by k, value limit 10 +POSTHOOK: query: create table nzhang_CTAS1 as select key k, value from src sort by k, value limit 10 POSTHOOK: type: CREATETABLE POSTHOOK: Input: default@src POSTHOOK: Output: default@nzhang_ctas1 -PREHOOK: query: select * from nzhang_ctas1 +PREHOOK: query: select * from nzhang_CTAS1 PREHOOK: type: QUERY PREHOOK: Input: default@nzhang_ctas1 -PREHOOK: Output: file:/data/users/nzhang/work/876/apache-hive/build/ql/tmp/170193651/10000 -POSTHOOK: query: select * from nzhang_ctas1 +PREHOOK: Output: file:/Users/nzhang/work/apache-hive-trunk/build/ql/tmp/1074875505/10000 +POSTHOOK: query: select * from nzhang_CTAS1 POSTHOOK: type: QUERY POSTHOOK: Input: default@nzhang_ctas1 -POSTHOOK: Output: file:/data/users/nzhang/work/876/apache-hive/build/ql/tmp/170193651/10000 +POSTHOOK: Output: file:/Users/nzhang/work/apache-hive-trunk/build/ql/tmp/1074875505/10000 0 val_0 0 val_0 0 val_0 @@ -191,7 +191,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/nzhang/work/876/apache-hive/build/ql/tmp/1299285756/10002 + 
file:/Users/nzhang/work/apache-hive-trunk/build/ql/tmp/1585743444/10002 Reduce Output Operator key expressions: expr: _col0 @@ -219,7 +219,7 @@ Move Operator files: hdfs directory: true - destination: file:///data/users/nzhang/work/876/apache-hive/ql/../build/ql/test/data/warehouse/nzhang_ctas2 + destination: file:///Users/nzhang/work/apache-hive-trunk/ql/../build/ql/test/data/warehouse/nzhang_ctas2 Stage: Stage-3 Create Table Operator: @@ -243,11 +243,11 @@ PREHOOK: query: select * from nzhang_ctas2 PREHOOK: type: QUERY PREHOOK: Input: default@nzhang_ctas2 -PREHOOK: Output: file:/data/users/nzhang/work/876/apache-hive/build/ql/tmp/1593006253/10000 +PREHOOK: Output: file:/Users/nzhang/work/apache-hive-trunk/build/ql/tmp/1392163509/10000 POSTHOOK: query: select * from nzhang_ctas2 POSTHOOK: type: QUERY POSTHOOK: Input: default@nzhang_ctas2 -POSTHOOK: Output: file:/data/users/nzhang/work/876/apache-hive/build/ql/tmp/1593006253/10000 +POSTHOOK: Output: file:/Users/nzhang/work/apache-hive-trunk/build/ql/tmp/1392163509/10000 0 val_0 0 val_0 0 val_0 @@ -311,7 +311,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/nzhang/work/876/apache-hive/build/ql/tmp/115382/10002 + file:/Users/nzhang/work/apache-hive-trunk/build/ql/tmp/1785158141/10002 Reduce Output Operator key expressions: expr: _col0 @@ -339,7 +339,7 @@ Move Operator files: hdfs directory: true - destination: file:///data/users/nzhang/work/876/apache-hive/ql/../build/ql/test/data/warehouse/nzhang_ctas3 + destination: file:///Users/nzhang/work/apache-hive-trunk/ql/../build/ql/test/data/warehouse/nzhang_ctas3 Stage: Stage-3 Create Table Operator: @@ -364,11 +364,11 @@ PREHOOK: query: select * from nzhang_ctas3 PREHOOK: type: QUERY PREHOOK: Input: default@nzhang_ctas3 -PREHOOK: Output: file:/data/users/nzhang/work/876/apache-hive/build/ql/tmp/263363884/10000 +PREHOOK: Output: file:/Users/nzhang/work/apache-hive-trunk/build/ql/tmp/317733487/10000 POSTHOOK: query: select * from nzhang_ctas3 
POSTHOOK: type: QUERY POSTHOOK: Input: default@nzhang_ctas3 -POSTHOOK: Output: file:/data/users/nzhang/work/876/apache-hive/build/ql/tmp/263363884/10000 +POSTHOOK: Output: file:/Users/nzhang/work/apache-hive-trunk/build/ql/tmp/317733487/10000 0.0 val_0_con 0.0 val_0_con 0.0 val_0_con @@ -397,11 +397,11 @@ PREHOOK: query: select * from nzhang_ctas3 PREHOOK: type: QUERY PREHOOK: Input: default@nzhang_ctas3 -PREHOOK: Output: file:/data/users/nzhang/work/876/apache-hive/build/ql/tmp/103710196/10000 +PREHOOK: Output: file:/Users/nzhang/work/apache-hive-trunk/build/ql/tmp/1280670231/10000 POSTHOOK: query: select * from nzhang_ctas3 POSTHOOK: type: QUERY POSTHOOK: Input: default@nzhang_ctas3 -POSTHOOK: Output: file:/data/users/nzhang/work/876/apache-hive/build/ql/tmp/103710196/10000 +POSTHOOK: Output: file:/Users/nzhang/work/apache-hive-trunk/build/ql/tmp/1280670231/10000 0.0 val_0_con 0.0 val_0_con 0.0 val_0_con @@ -465,7 +465,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/nzhang/work/876/apache-hive/build/ql/tmp/473374758/10002 + file:/Users/nzhang/work/apache-hive-trunk/build/ql/tmp/1973910867/10002 Reduce Output Operator key expressions: expr: _col0 @@ -493,7 +493,7 @@ Move Operator files: hdfs directory: true - destination: file:///data/users/nzhang/work/876/apache-hive/ql/../build/ql/test/data/warehouse/nzhang_ctas4 + destination: file:///Users/nzhang/work/apache-hive-trunk/ql/../build/ql/test/data/warehouse/nzhang_ctas4 Stage: Stage-3 Create Table Operator: @@ -518,11 +518,11 @@ PREHOOK: query: select * from nzhang_ctas4 PREHOOK: type: QUERY PREHOOK: Input: default@nzhang_ctas4 -PREHOOK: Output: file:/data/users/nzhang/work/876/apache-hive/build/ql/tmp/1232078865/10000 +PREHOOK: Output: file:/Users/nzhang/work/apache-hive-trunk/build/ql/tmp/781832344/10000 POSTHOOK: query: select * from nzhang_ctas4 POSTHOOK: type: QUERY POSTHOOK: Input: default@nzhang_ctas4 -POSTHOOK: Output: 
file:/data/users/nzhang/work/876/apache-hive/build/ql/tmp/1232078865/10000 +POSTHOOK: Output: file:/Users/nzhang/work/apache-hive-trunk/build/ql/tmp/781832344/10000 0 val_0 0 val_0 0 val_0 @@ -575,9 +575,9 @@ type: string Needs Tagging: false Path -> Alias: - file:/data/users/nzhang/work/876/apache-hive/build/ql/test/data/warehouse/src [src] + file:/Users/nzhang/work/apache-hive-trunk/build/ql/test/data/warehouse/src [src] Path -> Partition: - file:/data/users/nzhang/work/876/apache-hive/build/ql/test/data/warehouse/src + file:/Users/nzhang/work/apache-hive-trunk/build/ql/test/data/warehouse/src Partition input format: org.apache.hadoop.mapred.TextInputFormat @@ -592,8 +592,8 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/nzhang/work/876/apache-hive/build/ql/test/data/warehouse/src - transient_lastDdlTime 1255728382 + location file:/Users/nzhang/work/apache-hive-trunk/build/ql/test/data/warehouse/src + transient_lastDdlTime 1257346497 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: src Reduce Operator Tree: @@ -602,7 +602,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/data/users/nzhang/work/876/apache-hive/build/ql/tmp/751741251/10002 + directory: file:/Users/nzhang/work/apache-hive-trunk/build/ql/tmp/527049251/10002 table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -614,7 +614,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/nzhang/work/876/apache-hive/build/ql/tmp/751741251/10002 + file:/Users/nzhang/work/apache-hive-trunk/build/ql/tmp/527049251/10002 Reduce Output Operator key expressions: expr: _col0 @@ -630,9 +630,9 @@ type: string Needs Tagging: false Path -> Alias: - 
file:/data/users/nzhang/work/876/apache-hive/build/ql/tmp/751741251/10002 [file:/data/users/nzhang/work/876/apache-hive/build/ql/tmp/751741251/10002] + file:/Users/nzhang/work/apache-hive-trunk/build/ql/tmp/527049251/10002 [file:/Users/nzhang/work/apache-hive-trunk/build/ql/tmp/527049251/10002] Path -> Partition: - file:/data/users/nzhang/work/876/apache-hive/build/ql/tmp/751741251/10002 + file:/Users/nzhang/work/apache-hive-trunk/build/ql/tmp/527049251/10002 Partition input format: org.apache.hadoop.mapred.SequenceFileInputFormat @@ -647,7 +647,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/data/users/nzhang/work/876/apache-hive/build/ql/tmp/751741251/10001 + directory: file:/Users/nzhang/work/apache-hive-trunk/build/ql/tmp/527049251/10001 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -662,8 +662,8 @@ Move Operator files: hdfs directory: true - source: file:/data/users/nzhang/work/876/apache-hive/build/ql/tmp/751741251/10001 - destination: file:///data/users/nzhang/work/876/apache-hive/ql/../build/ql/test/data/warehouse/nzhang_ctas5 + source: file:/Users/nzhang/work/apache-hive-trunk/build/ql/tmp/527049251/10001 + destination: file:///Users/nzhang/work/apache-hive-trunk/ql/../build/ql/test/data/warehouse/nzhang_ctas5 Stage: Stage-3 Create Table Operator: Index: ql/src/test/queries/clientpositive/ctas.q =================================================================== --- ql/src/test/queries/clientpositive/ctas.q (revision 832763) +++ ql/src/test/queries/clientpositive/ctas.q (working copy) @@ -4,11 +4,11 @@ drop table nzhang_ctas4; drop table nzhang_ctas5; -explain create table nzhang_ctas1 as select key k, value from src sort by k, value limit 10; +explain create table nzhang_CTAS1 as select key k, value from src sort by k, value limit 10; -create table nzhang_ctas1 as select key k, value from src sort by k, value limit 10; +create table 
nzhang_CTAS1 as select key k, value from src sort by k, value limit 10; -select * from nzhang_ctas1; +select * from nzhang_CTAS1; explain create table nzhang_ctas2 as select * from src sort by key, value limit 10; Index: ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (revision 832763) +++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (working copy) @@ -4810,7 +4810,7 @@ */ private ASTNode analyzeCreateTable(ASTNode ast, QB qb) throws SemanticException { - String tableName = unescapeIdentifier(ast.getChild(0).getText()); + String tableName = unescapeIdentifier(ast.getChild(0).getText().toLowerCase()); String likeTableName = null; List<FieldSchema> cols = null; List<FieldSchema> partCols = null;