Index: ql/src/test/results/clientnegative/invalid_t_create1.q.out
===================================================================
--- ql/src/test/results/clientnegative/invalid_t_create1.q.out	(revision 0)
+++ ql/src/test/results/clientnegative/invalid_t_create1.q.out	(revision 0)
@@ -0,0 +1 @@
+FAILED: Error in semantic analysis: DATE, DATETIME, and TIMESTAMP types aren't supported yet. Please use STRING instead.
Index: ql/src/test/results/clientnegative/invalid_t_create3.q.out
===================================================================
--- ql/src/test/results/clientnegative/invalid_t_create3.q.out	(revision 0)
+++ ql/src/test/results/clientnegative/invalid_t_create3.q.out	(revision 0)
@@ -0,0 +1 @@
+FAILED: Error in semantic analysis: DATE, DATETIME, and TIMESTAMP types aren't supported yet. Please use STRING instead.
Index: ql/src/test/results/clientnegative/invalid_t_alter1.q.out
===================================================================
--- ql/src/test/results/clientnegative/invalid_t_alter1.q.out	(revision 0)
+++ ql/src/test/results/clientnegative/invalid_t_alter1.q.out	(revision 0)
@@ -0,0 +1,6 @@
+PREHOOK: query: CREATE TABLE alter_test (d STRING)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE alter_test (d STRING)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@alter_test
+FAILED: Error in semantic analysis: DATE, DATETIME, and TIMESTAMP types aren't supported yet. Please use STRING instead.
Index: ql/src/test/results/clientnegative/invalid_create_tbl1.q.out
===================================================================
--- ql/src/test/results/clientnegative/invalid_create_tbl1.q.out	(revision 4662)
+++ ql/src/test/results/clientnegative/invalid_create_tbl1.q.out	(working copy)
@@ -2,13 +2,4 @@
 PREHOOK: type: DROPTABLE
 POSTHOOK: query: DROP TABLE inv_valid_tbl1
 POSTHOOK: type: DROPTABLE
-PREHOOK: query: CREATE TABLE inv_valid_tbl1 COMMENT 'This is a thrift based table'
-    PARTITIONED BY(aint DATETIME, country STRING)
-    CLUSTERED BY(aint) SORTED BY(lint) INTO 32 BUCKETS
-    ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.thrift.ThriftDeserializer'
-    WITH SERDEPROPERTIES ('serialization.class' = 'org.apache.hadoop.hive.serde2.thrift.test.Complex',
-    'serialization.format' = 'org.apache.thrift.protocol.TBinaryProtocol')
-    STORED AS SEQUENCEFILE
-PREHOOK: type: CREATETABLE
-FAILED: Error in metadata: org.apache.hadoop.hive.ql.metadata.HiveException: Partition column name aint conflicts with table columns.
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
+FAILED: Error in semantic analysis: DATE, DATETIME, and TIMESTAMP types aren't supported yet. Please use STRING instead.
Index: ql/src/test/results/clientnegative/invalid_t_transform.q.out
===================================================================
--- ql/src/test/results/clientnegative/invalid_t_transform.q.out	(revision 0)
+++ ql/src/test/results/clientnegative/invalid_t_transform.q.out	(revision 0)
@@ -0,0 +1 @@
+FAILED: Error in semantic analysis: DATE, DATETIME, and TIMESTAMP types aren't supported yet. Please use STRING instead.
Index: ql/src/test/results/clientnegative/invalid_t_create2.q.out
===================================================================
--- ql/src/test/results/clientnegative/invalid_t_create2.q.out	(revision 0)
+++ ql/src/test/results/clientnegative/invalid_t_create2.q.out	(revision 0)
@@ -0,0 +1 @@
+FAILED: Error in semantic analysis: DATE, DATETIME, and TIMESTAMP types aren't supported yet. Please use STRING instead.
Index: ql/src/test/results/clientnegative/invalid_t_alter2.q.out
===================================================================
--- ql/src/test/results/clientnegative/invalid_t_alter2.q.out	(revision 0)
+++ ql/src/test/results/clientnegative/invalid_t_alter2.q.out	(revision 0)
@@ -0,0 +1,4 @@
+PREHOOK: query: CREATE TABLE alter_test (d STRING)
+PREHOOK: type: CREATETABLE
+FAILED: Error in metadata: AlreadyExistsException(message:Table alter_test already exists)
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
Index: ql/src/test/results/clientpositive/inputddl8.q.out
===================================================================
--- ql/src/test/results/clientpositive/inputddl8.q.out	(revision 4662)
+++ ql/src/test/results/clientpositive/inputddl8.q.out	(working copy)
@@ -3,7 +3,7 @@
 POSTHOOK: query: DROP TABLE INPUTDDL8
 POSTHOOK: type: DROPTABLE
 PREHOOK: query: CREATE TABLE INPUTDDL8 COMMENT 'This is a thrift based table'
-    PARTITIONED BY(ds DATETIME, country STRING)
+    PARTITIONED BY(ds STRING, country STRING)
     CLUSTERED BY(aint) SORTED BY(lint) INTO 32 BUCKETS
     ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.thrift.ThriftDeserializer'
     WITH SERDEPROPERTIES ('serialization.class' = 'org.apache.hadoop.hive.serde2.thrift.test.Complex',
@@ -11,7 +11,7 @@
     STORED AS SEQUENCEFILE
 PREHOOK: type: CREATETABLE
 POSTHOOK: query: CREATE TABLE INPUTDDL8 COMMENT 'This is a thrift based table'
-    PARTITIONED BY(ds DATETIME, country STRING)
+    PARTITIONED BY(ds STRING, country STRING)
     CLUSTERED BY(aint) SORTED BY(lint) INTO 32 BUCKETS
     ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.thrift.ThriftDeserializer'
     WITH SERDEPROPERTIES ('serialization.class' = 'org.apache.hadoop.hive.serde2.thrift.test.Complex',
@@ -29,10 +29,10 @@
 lstring	array<string>	from deserializer
 lintstring	array<org.apache.hadoop.hive.serde2.thrift.test.IntString>	from deserializer
 mstringstring	map<string,string>	from deserializer
-ds	datetime
+ds	string
 country	string

-Detailed Table Information	Table(tableName:inputddl8, dbName:default, owner:njain, createTime:1253780774, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[], location:file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/inputddl8, inputFormat:org.apache.hadoop.mapred.SequenceFileInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat, compressed:false, numBuckets:32, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.thrift.ThriftDeserializer, parameters:{serialization.class=org.apache.hadoop.hive.serde2.thrift.test.Complex,serialization.format=com.facebook.thrift.protocol.TBinaryProtocol}), bucketCols:[aint], sortCols:[Order(col:lint, order:1)], parameters:{}), partitionKeys:[FieldSchema(name:ds, type:datetime, comment:null), FieldSchema(name:country, type:string, comment:null)], parameters:{comment=This is a thrift based table})
+Detailed Table Information	Table(tableName:inputddl8, dbName:default, owner:pyang, createTime:1264209638, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[], location:file:/data/users/pyang/task2/trunk/VENDOR.hive/trunk/build/ql/test/data/warehouse/inputddl8, inputFormat:org.apache.hadoop.mapred.SequenceFileInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat, compressed:false, numBuckets:32, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.thrift.ThriftDeserializer, parameters:{serialization.class=org.apache.hadoop.hive.serde2.thrift.test.Complex,serialization.format=com.facebook.thrift.protocol.TBinaryProtocol}), bucketCols:[aint], sortCols:[Order(col:lint, order:1)], parameters:{}), partitionKeys:[FieldSchema(name:ds, type:string, comment:null), FieldSchema(name:country, type:string, comment:null)], parameters:{transient_lastDdlTime=1264209638,comment=This is a thrift based table}, viewOriginalText:null, viewExpandedText:null)
 PREHOOK: query: DROP TABLE INPUTDDL8
 PREHOOK: type: DROPTABLE
 POSTHOOK: query: DROP TABLE INPUTDDL8
Index: ql/src/test/results/clientpositive/show_tables.q.out
===================================================================
--- ql/src/test/results/clientpositive/show_tables.q.out	(revision 4662)
+++ ql/src/test/results/clientpositive/show_tables.q.out	(working copy)
@@ -1,11 +1,11 @@
-PREHOOK: query: CREATE TABLE shtb_test1(KEY STRING, VALUE STRING) PARTITIONED BY(ds DATETIME) STORED AS TEXTFILE
+PREHOOK: query: CREATE TABLE shtb_test1(KEY STRING, VALUE STRING) PARTITIONED BY(ds STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
-POSTHOOK: query: CREATE TABLE shtb_test1(KEY STRING, VALUE STRING) PARTITIONED BY(ds DATETIME) STORED AS TEXTFILE
+POSTHOOK: query: CREATE TABLE shtb_test1(KEY STRING, VALUE STRING) PARTITIONED BY(ds STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@shtb_test1
-PREHOOK: query: CREATE TABLE shtb_test2(KEY STRING, VALUE STRING) PARTITIONED BY(ds DATETIME) STORED AS TEXTFILE
+PREHOOK: query: CREATE TABLE shtb_test2(KEY STRING, VALUE STRING) PARTITIONED BY(ds STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
-POSTHOOK: query: CREATE TABLE shtb_test2(KEY STRING, VALUE STRING) PARTITIONED BY(ds DATETIME) STORED AS TEXTFILE
+POSTHOOK: query: CREATE TABLE shtb_test2(KEY STRING, VALUE STRING) PARTITIONED BY(ds STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@shtb_test2
 PREHOOK: query: EXPLAIN
Index: ql/src/test/results/clientpositive/inputddl6.q.out
===================================================================
--- ql/src/test/results/clientpositive/inputddl6.q.out	(revision 4662)
+++ ql/src/test/results/clientpositive/inputddl6.q.out	(working copy)
@@ -8,9 +8,9 @@
 -- test for alter table drop partition
 DROP TABLE INPUTDDL6
 POSTHOOK: type: DROPTABLE
-PREHOOK: query: CREATE TABLE INPUTDDL6(KEY STRING, VALUE STRING) PARTITIONED BY(ds DATETIME) STORED AS TEXTFILE
+PREHOOK: query: CREATE TABLE INPUTDDL6(KEY STRING, VALUE STRING) PARTITIONED BY(ds STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
-POSTHOOK: query: CREATE TABLE INPUTDDL6(KEY STRING, VALUE STRING) PARTITIONED BY(ds DATETIME) STORED AS TEXTFILE
+POSTHOOK: query: CREATE TABLE INPUTDDL6(KEY STRING, VALUE STRING) PARTITIONED BY(ds STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@INPUTDDL6
 PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE INPUTDDL6 PARTITION (ds='2008-04-09')
@@ -29,18 +29,18 @@
 POSTHOOK: type: DESCTABLE
 key	string
 value	string
-ds	datetime
+ds	string

-Detailed Table Information	Table(tableName:inputddl6, dbName:default, owner:njain, createTime:1253780755, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null), FieldSchema(name:value, type:string, comment:null)], location:file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/inputddl6, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:ds, type:datetime, comment:null)], parameters:{})
+Detailed Table Information	Table(tableName:inputddl6, dbName:default, owner:pyang, createTime:1264209075, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null), FieldSchema(name:value, type:string, comment:null)], location:file:/data/users/pyang/task2/trunk/VENDOR.hive/trunk/build/ql/test/data/warehouse/inputddl6, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:ds, type:string, comment:null)], parameters:{transient_lastDdlTime=1264209075}, viewOriginalText:null, viewExpandedText:null)
 PREHOOK: query: DESCRIBE EXTENDED INPUTDDL6 PARTITION (ds='2008-04-08')
 PREHOOK: type: DESCTABLE
 POSTHOOK: query: DESCRIBE EXTENDED INPUTDDL6 PARTITION (ds='2008-04-08')
 POSTHOOK: type: DESCTABLE
 key	string
 value	string
-ds	datetime
+ds	string

-Detailed Partition Information	Partition(values:[2008-04-08], dbName:default, tableName:inputddl6, createTime:0, lastAccessTime:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null), FieldSchema(name:value, type:string, comment:null)], location:file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/inputddl6/ds=2008-04-08, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), parameters:{})
+Detailed Partition Information	Partition(values:[2008-04-08], dbName:default, tableName:inputddl6, createTime:1264209075, lastAccessTime:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null), FieldSchema(name:value, type:string, comment:null)], location:file:/data/users/pyang/task2/trunk/VENDOR.hive/trunk/build/ql/test/data/warehouse/inputddl6/ds=2008-04-08, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), parameters:{transient_lastDdlTime=1264209075})
 PREHOOK: query: SHOW PARTITIONS INPUTDDL6
 PREHOOK: type: SHOWPARTITIONS
 POSTHOOK: query: SHOW PARTITIONS INPUTDDL6
Index: ql/src/test/results/clientpositive/inputddl4.q.out
===================================================================
--- ql/src/test/results/clientpositive/inputddl4.q.out	(revision 4662)
+++ ql/src/test/results/clientpositive/inputddl4.q.out	(working copy)
@@ -9,7 +9,7 @@
     friends ARRAY<STRING>, properties MAP<STRING, STRING>,
     ip STRING COMMENT 'IP Address of the User')
 COMMENT 'This is the page view table'
-PARTITIONED BY(ds DATETIME, country STRING)
+PARTITIONED BY(ds STRING, country STRING)
 CLUSTERED BY(userid) SORTED BY(viewTime) INTO 32 BUCKETS
 PREHOOK: type: CREATETABLE
 POSTHOOK: query: CREATE TABLE INPUTDDL4(viewTime STRING, userid INT,
@@ -17,7 +17,7 @@
     friends ARRAY<STRING>, properties MAP<STRING, STRING>,
     ip STRING COMMENT 'IP Address of the User')
 COMMENT 'This is the page view table'
-PARTITIONED BY(ds DATETIME, country STRING)
+PARTITIONED BY(ds STRING, country STRING)
 CLUSTERED BY(userid) SORTED BY(viewTime) INTO 32 BUCKETS
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@INPUTDDL4
@@ -32,7 +32,7 @@
 friends	array<string>
 properties	map<string,string>
 ip	string	IP Address of the User
-ds	datetime
+ds	string
 country	string
 PREHOOK: query: DESCRIBE EXTENDED INPUTDDL4
 PREHOOK: type: DESCTABLE
@@ -45,10 +45,10 @@
 friends	array<string>
 properties	map<string,string>
 ip	string	IP Address of the User
-ds	datetime
+ds	string
 country	string

-Detailed Table Information	Table(tableName:inputddl4, dbName:default, owner:njain, createTime:1253780744, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:viewtime, type:string, comment:null), FieldSchema(name:userid, type:int, comment:null), FieldSchema(name:page_url, type:string, comment:null), FieldSchema(name:referrer_url, type:string, comment:null), FieldSchema(name:friends, type:array<string>, comment:null), FieldSchema(name:properties, type:map<string,string>, comment:null), FieldSchema(name:ip, type:string, comment:IP Address of the User)], location:file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/inputddl4, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:32, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[userid], sortCols:[Order(col:viewtime, order:1)], parameters:{}), partitionKeys:[FieldSchema(name:ds, type:datetime, comment:null), FieldSchema(name:country, type:string, comment:null)], parameters:{comment=This is the page view table})
+Detailed Table Information	Table(tableName:inputddl4, dbName:default, owner:pyang, createTime:1264208851, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:viewtime, type:string, comment:null), FieldSchema(name:userid, type:int, comment:null), FieldSchema(name:page_url, type:string, comment:null), FieldSchema(name:referrer_url, type:string, comment:null), FieldSchema(name:friends, type:array<string>, comment:null), FieldSchema(name:properties, type:map<string,string>, comment:null), FieldSchema(name:ip, type:string, comment:IP Address of the User)], location:file:/data/users/pyang/task2/trunk/VENDOR.hive/trunk/build/ql/test/data/warehouse/inputddl4, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:32, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[userid], sortCols:[Order(col:viewtime, order:1)], parameters:{}), partitionKeys:[FieldSchema(name:ds, type:string, comment:null), FieldSchema(name:country, type:string, comment:null)], parameters:{transient_lastDdlTime=1264208851,comment=This is the page view table}, viewOriginalText:null, viewExpandedText:null)
 PREHOOK: query: DROP TABLE INPUTDDL4
 PREHOOK: type: DROPTABLE
 POSTHOOK: query: DROP TABLE INPUTDDL4
Index: ql/src/test/results/clientpositive/inputddl2.q.out
===================================================================
--- ql/src/test/results/clientpositive/inputddl2.q.out	(revision 4662)
+++ ql/src/test/results/clientpositive/inputddl2.q.out	(working copy)
@@ -1,11 +1,11 @@
 PREHOOK: query: EXPLAIN
-CREATE TABLE INPUTDDL2(key INT, value STRING) PARTITIONED BY(ds DATETIME, country STRING) STORED AS TEXTFILE
+CREATE TABLE INPUTDDL2(key INT, value STRING) PARTITIONED BY(ds STRING, country STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 POSTHOOK: query: EXPLAIN
-CREATE TABLE INPUTDDL2(key INT, value STRING) PARTITIONED BY(ds DATETIME, country STRING) STORED AS TEXTFILE
+CREATE TABLE INPUTDDL2(key INT, value STRING) PARTITIONED BY(ds STRING, country STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 ABSTRACT SYNTAX TREE:
-  (TOK_CREATETABLE INPUTDDL2 TOK_LIKETABLE (TOK_TABCOLLIST (TOK_TABCOL key TOK_INT) (TOK_TABCOL value TOK_STRING)) (TOK_TABLEPARTCOLS (TOK_TABCOLLIST (TOK_TABCOL ds TOK_DATETIME) (TOK_TABCOL country TOK_STRING))) TOK_TBLTEXTFILE)
+  (TOK_CREATETABLE INPUTDDL2 TOK_LIKETABLE (TOK_TABCOLLIST (TOK_TABCOL key TOK_INT) (TOK_TABCOL value TOK_STRING)) (TOK_TABLEPARTCOLS (TOK_TABCOLLIST (TOK_TABCOL ds TOK_STRING) (TOK_TABCOL country TOK_STRING))) TOK_TBLTEXTFILE)

 STAGE DEPENDENCIES:
   Stage-0 is a root stage
@@ -19,14 +19,14 @@
           input format: org.apache.hadoop.mapred.TextInputFormat
           # buckets: -1
           output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-          partition columns: ds datetime, country string
+          partition columns: ds string, country string
           name: INPUTDDL2
           isExternal: false


-PREHOOK: query: CREATE TABLE INPUTDDL2(key INT, value STRING) PARTITIONED BY(ds DATETIME, country STRING) STORED AS TEXTFILE
+PREHOOK: query: CREATE TABLE INPUTDDL2(key INT, value STRING) PARTITIONED BY(ds STRING, country STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
-POSTHOOK: query: CREATE TABLE INPUTDDL2(key INT, value STRING) PARTITIONED BY(ds DATETIME, country STRING) STORED AS TEXTFILE
+POSTHOOK: query: CREATE TABLE INPUTDDL2(key INT, value STRING) PARTITIONED BY(ds STRING, country STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@INPUTDDL2
 PREHOOK: query: DESCRIBE INPUTDDL2
@@ -35,7 +35,7 @@
 POSTHOOK: type: DESCTABLE
 key	int
 value	string
-ds	datetime
+ds	string
 country	string
 PREHOOK: query: DROP TABLE INPUTDDL2
 PREHOOK: type: DROPTABLE
Index: ql/src/test/queries/clientnegative/invalid_t_alter1.q
===================================================================
--- ql/src/test/queries/clientnegative/invalid_t_alter1.q	(revision 0)
+++ ql/src/test/queries/clientnegative/invalid_t_alter1.q	(revision 0)
@@ -0,0 +1,2 @@
+CREATE TABLE alter_test (d STRING);
+ALTER TABLE alter_test CHANGE d d DATE;
Index: ql/src/test/queries/clientnegative/invalid_t_transform.q
===================================================================
--- ql/src/test/queries/clientnegative/invalid_t_transform.q	(revision 0)
+++ ql/src/test/queries/clientnegative/invalid_t_transform.q	(revision 0)
@@ -0,0 +1 @@
+SELECT TRANSFORM(*) USING 'cat' AS (key DATE) FROM src;
Index: ql/src/test/queries/clientnegative/invalid_t_create2.q
===================================================================
--- ql/src/test/queries/clientnegative/invalid_t_create2.q	(revision 0)
+++ ql/src/test/queries/clientnegative/invalid_t_create2.q	(revision 0)
@@ -0,0 +1 @@
+CREATE TABLE datetime_test (d DATETIME);
Index: ql/src/test/queries/clientnegative/invalid_t_alter2.q
===================================================================
--- ql/src/test/queries/clientnegative/invalid_t_alter2.q	(revision 0)
+++ ql/src/test/queries/clientnegative/invalid_t_alter2.q	(revision 0)
@@ -0,0 +1,2 @@
+CREATE TABLE alter_test (d STRING);
+ALTER TABLE alter_test ADD COLUMNS (ds DATE);
Index: ql/src/test/queries/clientnegative/invalid_t_create1.q
===================================================================
--- ql/src/test/queries/clientnegative/invalid_t_create1.q	(revision 0)
+++ ql/src/test/queries/clientnegative/invalid_t_create1.q	(revision 0)
@@ -0,0 +1 @@
+CREATE TABLE date_test (d DATE);
Index: ql/src/test/queries/clientnegative/invalid_t_create3.q
===================================================================
--- ql/src/test/queries/clientnegative/invalid_t_create3.q	(revision 0)
+++ ql/src/test/queries/clientnegative/invalid_t_create3.q	(revision 0)
@@ -0,0 +1 @@
+CREATE TABLE timestamp_test (d TIMESTAMP);
Index: ql/src/test/queries/clientpositive/show_tables.q
===================================================================
--- ql/src/test/queries/clientpositive/show_tables.q	(revision 4662)
+++ ql/src/test/queries/clientpositive/show_tables.q	(working copy)
@@ -1,5 +1,5 @@
-CREATE TABLE shtb_test1(KEY STRING, VALUE STRING) PARTITIONED BY(ds DATETIME) STORED AS TEXTFILE;
-CREATE TABLE shtb_test2(KEY STRING, VALUE STRING) PARTITIONED BY(ds DATETIME) STORED AS TEXTFILE;
+CREATE TABLE shtb_test1(KEY STRING, VALUE STRING) PARTITIONED BY(ds STRING) STORED AS TEXTFILE;
+CREATE TABLE shtb_test2(KEY STRING, VALUE STRING) PARTITIONED BY(ds STRING) STORED AS TEXTFILE;

 EXPLAIN
 SHOW TABLES 'shtb_*';
Index: ql/src/test/queries/clientpositive/inputddl4.q
===================================================================
--- ql/src/test/queries/clientpositive/inputddl4.q	(revision 4662)
+++ ql/src/test/queries/clientpositive/inputddl4.q	(working copy)
@@ -5,7 +5,7 @@
     friends ARRAY<STRING>, properties MAP<STRING, STRING>,
     ip STRING COMMENT 'IP Address of the User')
 COMMENT 'This is the page view table'
-PARTITIONED BY(ds DATETIME, country STRING)
+PARTITIONED BY(ds STRING, country STRING)
 CLUSTERED BY(userid) SORTED BY(viewTime) INTO 32 BUCKETS;
 DESCRIBE INPUTDDL4;
 DESCRIBE EXTENDED INPUTDDL4;
Index: ql/src/test/queries/clientpositive/inputddl6.q
===================================================================
--- ql/src/test/queries/clientpositive/inputddl6.q	(revision 4662)
+++ ql/src/test/queries/clientpositive/inputddl6.q	(working copy)
@@ -2,7 +2,7 @@
 -- test for describe extended table partition
 -- test for alter table drop partition
 DROP TABLE INPUTDDL6;
-CREATE TABLE INPUTDDL6(KEY STRING, VALUE STRING) PARTITIONED BY(ds DATETIME) STORED AS TEXTFILE;
+CREATE TABLE INPUTDDL6(KEY STRING, VALUE STRING) PARTITIONED BY(ds STRING) STORED AS TEXTFILE;
 LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE INPUTDDL6 PARTITION (ds='2008-04-09');
 LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE INPUTDDL6 PARTITION (ds='2008-04-08');
 DESCRIBE EXTENDED INPUTDDL6;
Index: ql/src/test/queries/clientpositive/inputddl8.q
===================================================================
--- ql/src/test/queries/clientpositive/inputddl8.q	(revision 4662)
+++ ql/src/test/queries/clientpositive/inputddl8.q	(working copy)
@@ -1,6 +1,6 @@
 DROP TABLE INPUTDDL8;
 CREATE TABLE INPUTDDL8 COMMENT 'This is a thrift based table'
-    PARTITIONED BY(ds DATETIME, country STRING)
+    PARTITIONED BY(ds STRING, country STRING)
     CLUSTERED BY(aint) SORTED BY(lint) INTO 32 BUCKETS
     ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.thrift.ThriftDeserializer'
     WITH SERDEPROPERTIES ('serialization.class' = 'org.apache.hadoop.hive.serde2.thrift.test.Complex',
Index: ql/src/test/queries/clientpositive/inputddl2.q
===================================================================
--- ql/src/test/queries/clientpositive/inputddl2.q	(revision 4662)
+++ ql/src/test/queries/clientpositive/inputddl2.q	(working copy)
@@ -1,6 +1,6 @@
 EXPLAIN
-CREATE TABLE INPUTDDL2(key INT, value STRING) PARTITIONED BY(ds DATETIME, country STRING) STORED AS TEXTFILE;
-CREATE TABLE INPUTDDL2(key INT, value STRING) PARTITIONED BY(ds DATETIME, country STRING) STORED AS TEXTFILE;
+CREATE TABLE INPUTDDL2(key INT, value STRING) PARTITIONED BY(ds STRING, country STRING) STORED AS TEXTFILE;
+CREATE TABLE INPUTDDL2(key INT, value STRING) PARTITIONED BY(ds STRING, country STRING) STORED AS TEXTFILE;
 DESCRIBE INPUTDDL2;
 DROP TABLE INPUTDDL2;
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java	(revision 4662)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java	(working copy)
@@ -118,7 +118,8 @@
       "The same output cannot be present multiple times: "), INVALID_AS(
       "AS clause has an invalid number of aliases"), VIEW_COL_MISMATCH(
       "The number of columns produced by the SELECT clause does not match the number of column names specified by CREATE VIEW"), DML_AGAINST_VIEW(
-      "A view cannot be used as target table for LOAD or INSERT");
+      "A view cannot be used as target table for LOAD or INSERT"), UNSUPPORTED_TYPE(
+      "DATE, DATETIME, and TIMESTAMP types aren't supported yet. Please use STRING instead.");

   private String mesg;
   private String SQLState;
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java	(revision 4662)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java	(working copy)
@@ -76,7 +76,12 @@
         .put(HiveParser.TOK_TIMESTAMP, Constants.TIMESTAMP_TYPE_NAME);
   }

-  public static String getTypeName(int token) {
+  public static String getTypeName(int token) throws SemanticException {
+    // date, datetime, and timestamp types aren't currently supported
+    if (token == HiveParser.TOK_DATE || token == HiveParser.TOK_DATETIME ||
+        token == HiveParser.TOK_TIMESTAMP) {
+      throw new SemanticException(ErrorMsg.UNSUPPORTED_TYPE.getMsg());
+    }
     return TokenToTypeName.get(token);
   }
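
Note (illustrative, not part of the patch): the functional change is the guard added to
DDLSemanticAnalyzer.getTypeName(), which rejects the three unsupported token types before
the token-to-type-name lookup, so a bad column type now fails fast in semantic analysis
with one clear message instead of surfacing later as a metadata or execution error
(compare the old and new invalid_create_tbl1.q.out above). The self-contained Java sketch
below mirrors that shape; the token constants and the unchecked IllegalArgumentException
are stand-ins for the generated HiveParser tokens and Hive's checked SemanticException.

import java.util.HashMap;
import java.util.Map;

public class UnsupportedTypeCheckSketch {

  // Stand-ins for the generated HiveParser token constants (values are arbitrary).
  static final int TOK_INT = 1, TOK_STRING = 2,
      TOK_DATE = 3, TOK_DATETIME = 4, TOK_TIMESTAMP = 5;

  // Mirrors the TokenToTypeName map consulted by getTypeName().
  static final Map<Integer, String> TOKEN_TO_TYPE_NAME = new HashMap<Integer, String>();
  static {
    TOKEN_TO_TYPE_NAME.put(TOK_INT, "int");
    TOKEN_TO_TYPE_NAME.put(TOK_STRING, "string");
    TOKEN_TO_TYPE_NAME.put(TOK_DATE, "date");
    TOKEN_TO_TYPE_NAME.put(TOK_DATETIME, "datetime");
    TOKEN_TO_TYPE_NAME.put(TOK_TIMESTAMP, "timestamp");
  }

  // Same shape as the patched getTypeName(): reject unsupported types up front,
  // before the map lookup ever happens.
  static String getTypeName(int token) {
    if (token == TOK_DATE || token == TOK_DATETIME || token == TOK_TIMESTAMP) {
      throw new IllegalArgumentException(
          "DATE, DATETIME, and TIMESTAMP types aren't supported yet. Please use STRING instead.");
    }
    return TOKEN_TO_TYPE_NAME.get(token);
  }

  public static void main(String[] args) {
    System.out.println(getTypeName(TOK_STRING)); // prints "string"
    try {
      getTypeName(TOK_DATETIME);                 // rejected before lookup
    } catch (IllegalArgumentException e) {
      System.out.println("FAILED: Error in semantic analysis: " + e.getMessage());
    }
  }
}

With the patch applied, each new clientnegative test is expected to fail with exactly the
one-line FAILED message recorded in its .q.out file, and the clientpositive DDL tests pass
again because their partition columns now use STRING.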