Index: ql/src/test/queries/clientnegative/invalid_t_transform.q
===================================================================
--- ql/src/test/queries/clientnegative/invalid_t_transform.q (revision 1447989)
+++ ql/src/test/queries/clientnegative/invalid_t_transform.q (working copy)
@@ -1 +1 @@
-SELECT TRANSFORM(*) USING 'cat' AS (key DATE) FROM src;
+SELECT TRANSFORM(*) USING 'cat' AS (key DATETIME) FROM src;
Index: ql/src/test/queries/clientnegative/invalid_t_alter1.q
===================================================================
--- ql/src/test/queries/clientnegative/invalid_t_alter1.q (revision 1447989)
+++ ql/src/test/queries/clientnegative/invalid_t_alter1.q (working copy)
@@ -1,2 +1,2 @@
 CREATE TABLE alter_test (d STRING);
-ALTER TABLE alter_test CHANGE d d DATE;
+ALTER TABLE alter_test CHANGE d d DATETIME;
Index: ql/src/test/queries/clientnegative/invalid_t_create1.q
===================================================================
--- ql/src/test/queries/clientnegative/invalid_t_create1.q (revision 1447989)
+++ ql/src/test/queries/clientnegative/invalid_t_create1.q (working copy)
@@ -1 +0,0 @@
-CREATE TABLE date_test (d DATE);
Index: ql/src/test/queries/clientnegative/invalid_t_alter2.q
===================================================================
--- ql/src/test/queries/clientnegative/invalid_t_alter2.q (revision 1447989)
+++ ql/src/test/queries/clientnegative/invalid_t_alter2.q (working copy)
@@ -1,2 +1,2 @@
 CREATE TABLE alter_test (d STRING);
-ALTER TABLE alter_test ADD COLUMNS (ds DATE);
+ALTER TABLE alter_test ADD COLUMNS (ds DATETIME);
Index: ql/src/test/queries/clientpositive/date_1.q
===================================================================
--- ql/src/test/queries/clientpositive/date_1.q (revision 0)
+++ ql/src/test/queries/clientpositive/date_1.q (revision 0)
@@ -0,0 +1,28 @@
+drop table date_1;
+
+create table date_1 (d date);
+alter table date_1 set serde 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe';
+
+insert overwrite table date_1
+  select cast('2011-01-01' as date) from src limit 1;
+select cast(d as boolean) from date_1 limit 1;
+select cast(d as tinyint) from date_1 limit 1;
+select cast(d as smallint) from date_1 limit 1;
+select cast(d as int) from date_1 limit 1;
+select cast(d as bigint) from date_1 limit 1;
+select cast(d as float) from date_1 limit 1;
+select cast(d as double) from date_1 limit 1;
+select cast(d as string) from date_1 limit 1;
+
+insert overwrite table date_1
+  select '2011-01-01' from src limit 1;
+select cast(d as boolean) from date_1 limit 1;
+select cast(d as tinyint) from date_1 limit 1;
+select cast(d as smallint) from date_1 limit 1;
+select cast(d as int) from date_1 limit 1;
+select cast(d as bigint) from date_1 limit 1;
+select cast(d as float) from date_1 limit 1;
+select cast(d as double) from date_1 limit 1;
+select cast(d as string) from date_1 limit 1;
+
+drop table date_1;
Index: ql/src/test/queries/clientpositive/date_3.q
===================================================================
--- ql/src/test/queries/clientpositive/date_3.q (revision 0)
+++ ql/src/test/queries/clientpositive/date_3.q (revision 0)
@@ -0,0 +1,18 @@
+drop table date_3;
+
+create table date_3 (d date);
+alter table date_3 set serde 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe';
+
+insert overwrite table date_3
+  select cast(cast('1.3041352164485E9' as double) as date) from src limit 1;
+select cast(d as boolean) from date_3 limit 1;
+select cast(d as tinyint) from date_3 limit 1;
+select cast(d as smallint) from date_3 limit 1;
+select cast(d as int) from date_3 limit 1;
+select cast(d as bigint) from date_3 limit 1;
+select cast(d as float) from date_3 limit 1;
+select cast(d as double) from date_3 limit 1;
+select cast(d as string) from date_3 limit 1;
+select cast(d as timestamp) from date_3 limit 1;
+
+drop table date_3;
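The expected values in the date_*.q.out sections further below all follow from one representation: a DATE materializes as the epoch seconds of its midnight in the session timezone, and the numeric casts simply narrow that value. A minimal sketch of the arithmetic, not part of the patch, assuming the q-tests' usual US/Pacific default timezone:

```java
// Not part of the patch: illustrates the cast ladder behind date_1.q's
// expected outputs, assuming the q-tests run in the US/Pacific timezone.
import java.util.Calendar;
import java.util.TimeZone;

public class DateCastSketch {
  public static void main(String[] args) {
    Calendar cal = Calendar.getInstance(TimeZone.getTimeZone("US/Pacific"));
    cal.clear();
    cal.set(2011, Calendar.JANUARY, 1);            // 2011-01-01 00:00:00 local time
    long seconds = cal.getTimeInMillis() / 1000;   // like DateWritable.getTimeInSeconds()

    System.out.println(seconds);                   // 1293868800  -> cast(d as int) / bigint
    System.out.println((short) seconds);           // -8448       -> cast(d as smallint)
    System.out.println((byte) seconds);            // 0           -> cast(d as tinyint)
    System.out.println((double) seconds);          // 1.2938688E9 -> cast(d as double)
  }
}
```

The narrowing casts keep only the low 16 and 8 bits of the epoch seconds, which is where the otherwise surprising -8448 and 0 in the expected outputs come from.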
Index: ql/src/test/queries/clientpositive/date_comparison.q
===================================================================
--- ql/src/test/queries/clientpositive/date_comparison.q (revision 0)
+++ ql/src/test/queries/clientpositive/date_comparison.q (revision 0)
@@ -0,0 +1,22 @@
+
+select cast('2011-05-06' as timestamp) >
+  cast('2011-05-06' as timestamp) from src limit 1;
+
+select cast('2011-05-06' as timestamp) <
+  cast('2011-05-06' as timestamp) from src limit 1;
+
+select cast('2011-05-06' as timestamp) =
+  cast('2011-05-06' as timestamp) from src limit 1;
+
+select cast('2011-05-06' as timestamp) <>
+  cast('2011-05-06' as timestamp) from src limit 1;
+
+select cast('2011-05-06' as timestamp) >=
+  cast('2011-05-06' as timestamp) from src limit 1;
+
+select cast('2011-05-06' as timestamp) <=
+  cast('2011-05-06' as timestamp) from src limit 1;
+
+select cast('2011-05-06' as timestamp) >=
+  cast('2011-05-06' as timestamp) from src limit 1;
+
Index: ql/src/test/queries/clientpositive/date_udf.q
===================================================================
--- ql/src/test/queries/clientpositive/date_udf.q (revision 0)
+++ ql/src/test/queries/clientpositive/date_udf.q (revision 0)
@@ -0,0 +1,38 @@
+drop table date_udf;
+drop table date_udf_string;
+
+create table date_udf (d date);
+create table date_udf_string (d string);
+from src
+  insert overwrite table date_udf
+    select '2011-05-06' limit 1
+  insert overwrite table date_udf_string
+    select '2011-05-06' limit 1;
+
+-- Test UDFs with date input
+select unix_timestamp(d), year(d), month(d), day(d), dayofmonth(d),
+    weekofyear(d), to_date(d)
+  from date_udf;
+
+select date_add(d, 5), date_sub(d, 10)
+  from date_udf;
+
+select datediff(d, d), datediff(d, '2002-03-21'), datediff('2002-03-21', d),
+    datediff(cast ('2002-03-21 00:00:00' as timestamp), d),
+    datediff(d, cast ('2002-03-21 00:00:00' as timestamp))
+  from date_udf;
+
+-- Test UDFs with string input
+select unix_timestamp(d), year(d), month(d), day(d), dayofmonth(d),
+    weekofyear(d), to_date(d)
+  from date_udf_string;
+
+select date_add(d, 5), date_sub(d, 10) from date_udf_string;
+
+select datediff(d, d), datediff(d, '2002-03-21'), datediff('2002-03-21', d),
+    datediff('2002-03-21 00:00:00', d),
+    datediff(d, '2002-03-21 00:00:00')
+  from date_udf_string;
+
+drop table date_udf;
+drop table date_udf_string;
Index: ql/src/test/queries/clientpositive/date_2.q
===================================================================
--- ql/src/test/queries/clientpositive/date_2.q (revision 0)
+++ ql/src/test/queries/clientpositive/date_2.q (revision 0)
@@ -0,0 +1,28 @@
+drop table date_2;
+
+create table date_2 (d date);
+alter table date_2 set serde 'org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe';
+
+insert overwrite table date_2
+  select cast('2011-01-01' as date) from src limit 1;
+select cast(d as boolean) from date_2 limit 1;
+select cast(d as tinyint) from date_2 limit 1;
+select cast(d as smallint) from date_2 limit 1;
+select cast(d as int) from date_2 limit 1;
+select cast(d as bigint) from date_2 limit 1;
+select cast(d as float) from date_2 limit 1;
+select cast(d as double) from date_2 limit 1;
+select cast(d as string) from date_2 limit 1;
+
+insert overwrite table date_2
+  select '2011-01-01' from src limit 1;
+select cast(d as boolean) from date_2 limit 1;
+select cast(d as tinyint) from date_2 limit 1;
+select cast(d as smallint) from date_2 limit 1;
+select cast(d as int) from date_2 limit 1;
+select cast(d as bigint) from date_2 limit 1;
+select cast(d as float) from date_2 limit 1;
+select cast(d as double) from date_2 limit 1;
+select cast(d as string) from date_2 limit 1;
+
+drop table date_2;
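date_udf.q above pins down day-granularity semantics for datediff, date_add, and date_sub on DATE (and string) inputs. A quick cross-check of the expected day counts, not part of the patch, using java.time (Java 8+) rather than Hive's own UDFs:

```java
// Not part of the patch: independently verifies the day counts that
// date_udf.q.out expects, using java.time instead of Hive's UDFs.
import java.time.LocalDate;
import java.time.temporal.ChronoUnit;

public class DateDiffSketch {
  public static void main(String[] args) {
    LocalDate d = LocalDate.of(2011, 5, 6);
    LocalDate ref = LocalDate.of(2002, 3, 21);

    System.out.println(ChronoUnit.DAYS.between(ref, d)); // 3333  -> datediff(d, '2002-03-21')
    System.out.println(ChronoUnit.DAYS.between(d, ref)); // -3333 -> datediff('2002-03-21', d)
    System.out.println(d.plusDays(5));                   // 2011-05-11 -> date_add(d, 5)
    System.out.println(d.minusDays(10));                 // 2011-04-26 -> date_sub(d, 10)
  }
}
```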
Index: ql/src/test/results/clientnegative/wrong_column_type.q.out
===================================================================
--- ql/src/test/results/clientnegative/wrong_column_type.q.out (revision 1447989)
+++ ql/src/test/results/clientnegative/wrong_column_type.q.out (working copy)
@@ -3,4 +3,4 @@
 POSTHOOK: query: CREATE TABLE dest1(a float)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@dest1
-FAILED: NoMatchingMethodException No matching method for class org.apache.hadoop.hive.ql.udf.UDFToFloat with (array). Possible choices: _FUNC_(void) _FUNC_(boolean) _FUNC_(tinyint) _FUNC_(smallint) _FUNC_(int) _FUNC_(bigint) _FUNC_(double) _FUNC_(string) _FUNC_(timestamp) _FUNC_(decimal)
+FAILED: NoMatchingMethodException No matching method for class org.apache.hadoop.hive.ql.udf.UDFToFloat with (array). Possible choices: _FUNC_(void) _FUNC_(boolean) _FUNC_(tinyint) _FUNC_(smallint) _FUNC_(int) _FUNC_(bigint) _FUNC_(double) _FUNC_(string) _FUNC_(date) _FUNC_(timestamp) _FUNC_(decimal)
Index: ql/src/test/results/clientnegative/invalid_cast_from_binary_6.q.out
===================================================================
--- ql/src/test/results/clientnegative/invalid_cast_from_binary_6.q.out (revision 1447989)
+++ ql/src/test/results/clientnegative/invalid_cast_from_binary_6.q.out (working copy)
@@ -3,4 +3,4 @@
 POSTHOOK: query: create table tbl (a binary)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@tbl
-FAILED: SemanticException Line 0:-1 Wrong arguments 'a': No matching method for class org.apache.hadoop.hive.ql.udf.UDFToDouble with (binary). Possible choices: _FUNC_(void) _FUNC_(boolean) _FUNC_(tinyint) _FUNC_(smallint) _FUNC_(int) _FUNC_(bigint) _FUNC_(float) _FUNC_(string) _FUNC_(timestamp) _FUNC_(decimal)
+FAILED: SemanticException Line 0:-1 Wrong arguments 'a': No matching method for class org.apache.hadoop.hive.ql.udf.UDFToDouble with (binary). Possible choices: _FUNC_(void) _FUNC_(boolean) _FUNC_(tinyint) _FUNC_(smallint) _FUNC_(int) _FUNC_(bigint) _FUNC_(float) _FUNC_(string) _FUNC_(date) _FUNC_(timestamp) _FUNC_(decimal)
Index: ql/src/test/results/clientnegative/invalid_t_alter2.q.out
===================================================================
--- ql/src/test/results/clientnegative/invalid_t_alter2.q.out (revision 1447989)
+++ ql/src/test/results/clientnegative/invalid_t_alter2.q.out (working copy)
@@ -3,4 +3,4 @@
 POSTHOOK: query: CREATE TABLE alter_test (d STRING)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@alter_test
-FAILED: SemanticException [Error 10099]: DATE and DATETIME types aren't supported yet. Please use TIMESTAMP instead
+FAILED: SemanticException [Error 10099]: DATETIME type isn't supported yet. Please use TIMESTAMP instead
Index: ql/src/test/results/clientnegative/invalid_cast_from_binary_1.q.out
===================================================================
--- ql/src/test/results/clientnegative/invalid_cast_from_binary_1.q.out (revision 1447989)
+++ ql/src/test/results/clientnegative/invalid_cast_from_binary_1.q.out (working copy)
@@ -3,4 +3,4 @@
 POSTHOOK: query: create table tbl (a binary)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@tbl
-FAILED: SemanticException Line 0:-1 Wrong arguments 'a': No matching method for class org.apache.hadoop.hive.ql.udf.UDFToInteger with (binary). Possible choices: _FUNC_(void) _FUNC_(boolean) _FUNC_(tinyint) _FUNC_(smallint) _FUNC_(bigint) _FUNC_(float) _FUNC_(double) _FUNC_(string) _FUNC_(timestamp) _FUNC_(decimal)
+FAILED: SemanticException Line 0:-1 Wrong arguments 'a': No matching method for class org.apache.hadoop.hive.ql.udf.UDFToInteger with (binary). Possible choices: _FUNC_(void) _FUNC_(boolean) _FUNC_(tinyint) _FUNC_(smallint) _FUNC_(bigint) _FUNC_(float) _FUNC_(double) _FUNC_(string) _FUNC_(date) _FUNC_(timestamp) _FUNC_(decimal)
Index: ql/src/test/results/clientnegative/invalid_create_tbl1.q.out
===================================================================
--- ql/src/test/results/clientnegative/invalid_create_tbl1.q.out (revision 1447989)
+++ ql/src/test/results/clientnegative/invalid_create_tbl1.q.out (working copy)
@@ -1 +1 @@
-FAILED: SemanticException [Error 10099]: DATE and DATETIME types aren't supported yet. Please use TIMESTAMP instead
+FAILED: SemanticException [Error 10099]: DATETIME type isn't supported yet. Please use TIMESTAMP instead
Index: ql/src/test/results/clientnegative/invalid_cast_from_binary_4.q.out
===================================================================
--- ql/src/test/results/clientnegative/invalid_cast_from_binary_4.q.out (revision 1447989)
+++ ql/src/test/results/clientnegative/invalid_cast_from_binary_4.q.out (working copy)
@@ -3,4 +3,4 @@
 POSTHOOK: query: create table tbl (a binary)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@tbl
-FAILED: SemanticException Line 0:-1 Wrong arguments 'a': No matching method for class org.apache.hadoop.hive.ql.udf.UDFToLong with (binary). Possible choices: _FUNC_(void) _FUNC_(boolean) _FUNC_(tinyint) _FUNC_(smallint) _FUNC_(int) _FUNC_(bigint) _FUNC_(float) _FUNC_(double) _FUNC_(string) _FUNC_(timestamp) _FUNC_(decimal)
+FAILED: SemanticException Line 0:-1 Wrong arguments 'a': No matching method for class org.apache.hadoop.hive.ql.udf.UDFToLong with (binary). Possible choices: _FUNC_(void) _FUNC_(boolean) _FUNC_(tinyint) _FUNC_(smallint) _FUNC_(int) _FUNC_(bigint) _FUNC_(float) _FUNC_(double) _FUNC_(string) _FUNC_(date) _FUNC_(timestamp) _FUNC_(decimal)
Index: ql/src/test/results/clientnegative/invalid_t_create1.q.out
===================================================================
--- ql/src/test/results/clientnegative/invalid_t_create1.q.out (revision 1447989)
+++ ql/src/test/results/clientnegative/invalid_t_create1.q.out (working copy)
@@ -1 +0,0 @@
-FAILED: SemanticException [Error 10099]: DATE and DATETIME types aren't supported yet. Please use TIMESTAMP instead
Index: ql/src/test/results/clientnegative/invalid_cast_from_binary_2.q.out
===================================================================
--- ql/src/test/results/clientnegative/invalid_cast_from_binary_2.q.out (revision 1447989)
+++ ql/src/test/results/clientnegative/invalid_cast_from_binary_2.q.out (working copy)
@@ -3,4 +3,4 @@
 POSTHOOK: query: create table tbl (a binary)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@tbl
-FAILED: SemanticException Line 0:-1 Wrong arguments 'a': No matching method for class org.apache.hadoop.hive.ql.udf.UDFToByte with (binary). Possible choices: _FUNC_(void) _FUNC_(boolean) _FUNC_(smallint) _FUNC_(int) _FUNC_(bigint) _FUNC_(float) _FUNC_(double) _FUNC_(string) _FUNC_(timestamp) _FUNC_(decimal)
+FAILED: SemanticException Line 0:-1 Wrong arguments 'a': No matching method for class org.apache.hadoop.hive.ql.udf.UDFToByte with (binary). Possible choices: _FUNC_(void) _FUNC_(boolean) _FUNC_(smallint) _FUNC_(int) _FUNC_(bigint) _FUNC_(float) _FUNC_(double) _FUNC_(string) _FUNC_(date) _FUNC_(timestamp) _FUNC_(decimal)
Index: ql/src/test/results/clientnegative/invalid_cast_from_binary_5.q.out
===================================================================
--- ql/src/test/results/clientnegative/invalid_cast_from_binary_5.q.out (revision 1447989)
+++ ql/src/test/results/clientnegative/invalid_cast_from_binary_5.q.out (working copy)
@@ -3,4 +3,4 @@
 POSTHOOK: query: create table tbl (a binary)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@tbl
-FAILED: SemanticException Line 0:-1 Wrong arguments 'a': No matching method for class org.apache.hadoop.hive.ql.udf.UDFToFloat with (binary). Possible choices: _FUNC_(void) _FUNC_(boolean) _FUNC_(tinyint) _FUNC_(smallint) _FUNC_(int) _FUNC_(bigint) _FUNC_(double) _FUNC_(string) _FUNC_(timestamp) _FUNC_(decimal)
+FAILED: SemanticException Line 0:-1 Wrong arguments 'a': No matching method for class org.apache.hadoop.hive.ql.udf.UDFToFloat with (binary). Possible choices: _FUNC_(void) _FUNC_(boolean) _FUNC_(tinyint) _FUNC_(smallint) _FUNC_(int) _FUNC_(bigint) _FUNC_(double) _FUNC_(string) _FUNC_(date) _FUNC_(timestamp) _FUNC_(decimal)
Index: ql/src/test/results/clientnegative/invalid_t_alter1.q.out
===================================================================
--- ql/src/test/results/clientnegative/invalid_t_alter1.q.out (revision 1447989)
+++ ql/src/test/results/clientnegative/invalid_t_alter1.q.out (working copy)
@@ -3,4 +3,4 @@
 POSTHOOK: query: CREATE TABLE alter_test (d STRING)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@alter_test
-FAILED: SemanticException [Error 10099]: DATE and DATETIME types aren't supported yet. Please use TIMESTAMP instead
+FAILED: SemanticException [Error 10099]: DATETIME type isn't supported yet. Please use TIMESTAMP instead
Index: ql/src/test/results/clientnegative/invalid_t_transform.q.out
===================================================================
--- ql/src/test/results/clientnegative/invalid_t_transform.q.out (revision 1447989)
+++ ql/src/test/results/clientnegative/invalid_t_transform.q.out (working copy)
@@ -1 +1 @@
-FAILED: SemanticException [Error 10099]: DATE and DATETIME types aren't supported yet. Please use TIMESTAMP instead
+FAILED: SemanticException [Error 10099]: DATETIME type isn't supported yet. Please use TIMESTAMP instead
Index: ql/src/test/results/clientnegative/invalid_t_create2.q.out
===================================================================
--- ql/src/test/results/clientnegative/invalid_t_create2.q.out (revision 1447989)
+++ ql/src/test/results/clientnegative/invalid_t_create2.q.out (working copy)
@@ -1 +1 @@
-FAILED: SemanticException [Error 10099]: DATE and DATETIME types aren't supported yet. Please use TIMESTAMP instead
+FAILED: SemanticException [Error 10099]: DATETIME type isn't supported yet. Please use TIMESTAMP instead
Index: ql/src/test/results/clientnegative/invalid_cast_from_binary_3.q.out
===================================================================
--- ql/src/test/results/clientnegative/invalid_cast_from_binary_3.q.out (revision 1447989)
+++ ql/src/test/results/clientnegative/invalid_cast_from_binary_3.q.out (working copy)
@@ -3,4 +3,4 @@
 POSTHOOK: query: create table tbl (a binary)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@tbl
-FAILED: SemanticException Line 0:-1 Wrong arguments 'a': No matching method for class org.apache.hadoop.hive.ql.udf.UDFToShort with (binary). Possible choices: _FUNC_(void) _FUNC_(boolean) _FUNC_(tinyint) _FUNC_(int) _FUNC_(bigint) _FUNC_(float) _FUNC_(double) _FUNC_(string) _FUNC_(timestamp) _FUNC_(decimal)
+FAILED: SemanticException Line 0:-1 Wrong arguments 'a': No matching method for class org.apache.hadoop.hive.ql.udf.UDFToShort with (binary). Possible choices: _FUNC_(void) _FUNC_(boolean) _FUNC_(tinyint) _FUNC_(int) _FUNC_(bigint) _FUNC_(float) _FUNC_(double) _FUNC_(string) _FUNC_(date) _FUNC_(timestamp) _FUNC_(decimal)
Index: ql/src/test/results/clientpositive/date_2.q.out
===================================================================
--- ql/src/test/results/clientpositive/date_2.q.out (revision 0)
+++ ql/src/test/results/clientpositive/date_2.q.out (revision 0)
@@ -0,0 +1,218 @@
+PREHOOK: query: drop table date_2
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table date_2
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table date_2 (d date)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table date_2 (d date)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@date_2
+PREHOOK: query: alter table date_2 set serde 'org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe'
+PREHOOK: type: ALTERTABLE_SERIALIZER
+PREHOOK: Input: default@date_2
+PREHOOK: Output: default@date_2
+POSTHOOK: query: alter table date_2 set serde 'org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe'
+POSTHOOK: type: ALTERTABLE_SERIALIZER
+POSTHOOK: Input: default@date_2
+POSTHOOK: Output: default@date_2
+PREHOOK: query: insert overwrite table date_2
+  select cast('2011-01-01' as date) from src limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@date_2
+POSTHOOK: query: insert overwrite table date_2
+  select cast('2011-01-01' as date) from src limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@date_2
+POSTHOOK: Lineage: date_2.d EXPRESSION []
+PREHOOK: query: select cast(d as boolean) from date_2 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@date_2
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(d as boolean) from date_2 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@date_2
+#### A masked pattern was here ####
+POSTHOOK: Lineage: date_2.d EXPRESSION []
+true
+PREHOOK: query: select cast(d as tinyint) from date_2 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@date_2
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(d as tinyint) from date_2 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@date_2
+#### A masked pattern was here ####
+POSTHOOK: Lineage: date_2.d EXPRESSION []
+0
+PREHOOK: query: select cast(d as smallint) from date_2 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@date_2
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(d as smallint) from date_2 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@date_2
+#### A masked pattern was here ####
+POSTHOOK: Lineage: date_2.d EXPRESSION []
+-8448
+PREHOOK: query: select cast(d as int) from date_2 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@date_2
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(d as int) from date_2 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@date_2
+#### A masked pattern was here ####
+POSTHOOK: Lineage: date_2.d EXPRESSION []
+1293868800
+PREHOOK: query: select cast(d as bigint) from date_2 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@date_2
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(d as bigint) from date_2 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@date_2
+#### A masked pattern was here ####
+POSTHOOK: Lineage: date_2.d EXPRESSION []
+1293868800
+PREHOOK: query: select cast(d as float) from date_2 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@date_2
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(d as float) from date_2 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@date_2
+#### A masked pattern was here ####
+POSTHOOK: Lineage: date_2.d EXPRESSION []
+1.2938688E9
+PREHOOK: query: select cast(d as double) from date_2 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@date_2
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(d as double) from date_2 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@date_2
+#### A masked pattern was here ####
+POSTHOOK: Lineage: date_2.d EXPRESSION []
+1.2938688E9
+PREHOOK: query: select cast(d as string) from date_2 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@date_2
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(d as string) from date_2 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@date_2
+#### A masked pattern was here ####
+POSTHOOK: Lineage: date_2.d EXPRESSION []
+2011-01-01
+PREHOOK: query: insert overwrite table date_2
+  select '2011-01-01' from src limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@date_2
+POSTHOOK: query: insert overwrite table date_2
+  select '2011-01-01' from src limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@date_2
+POSTHOOK: Lineage: date_2.d EXPRESSION []
+POSTHOOK: Lineage: date_2.d EXPRESSION []
+PREHOOK: query: select cast(d as boolean) from date_2 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@date_2
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(d as boolean) from date_2 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@date_2
+#### A masked pattern was here ####
+POSTHOOK: Lineage: date_2.d EXPRESSION []
+POSTHOOK: Lineage: date_2.d EXPRESSION []
+true
+PREHOOK: query: select cast(d as tinyint) from date_2 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@date_2
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(d as tinyint) from date_2 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@date_2
+#### A masked pattern was here ####
+POSTHOOK: Lineage: date_2.d EXPRESSION []
+POSTHOOK: Lineage: date_2.d EXPRESSION []
+0
+PREHOOK: query: select cast(d as smallint) from date_2 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@date_2
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(d as smallint) from date_2 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@date_2
+#### A masked pattern was here ####
+POSTHOOK: Lineage: date_2.d EXPRESSION []
+POSTHOOK: Lineage: date_2.d EXPRESSION []
+-8448
+PREHOOK: query: select cast(d as int) from date_2 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@date_2
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(d as int) from date_2 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@date_2
+#### A masked pattern was here ####
+POSTHOOK: Lineage: date_2.d EXPRESSION []
+POSTHOOK: Lineage: date_2.d EXPRESSION []
+1293868800
+PREHOOK: query: select cast(d as bigint) from date_2 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@date_2
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(d as bigint) from date_2 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@date_2
+#### A masked pattern was here ####
+POSTHOOK: Lineage: date_2.d EXPRESSION []
+POSTHOOK: Lineage: date_2.d EXPRESSION []
+1293868800
+PREHOOK: query: select cast(d as float) from date_2 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@date_2
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(d as float) from date_2 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@date_2
+#### A masked pattern was here ####
+POSTHOOK: Lineage: date_2.d EXPRESSION []
+POSTHOOK: Lineage: date_2.d EXPRESSION []
+1.2938688E9
+PREHOOK: query: select cast(d as double) from date_2 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@date_2
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(d as double) from date_2 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@date_2
+#### A masked pattern was here ####
+POSTHOOK: Lineage: date_2.d EXPRESSION []
+POSTHOOK: Lineage: date_2.d EXPRESSION []
+1.2938688E9
+PREHOOK: query: select cast(d as string) from date_2 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@date_2
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(d as string) from date_2 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@date_2
+#### A masked pattern was here ####
+POSTHOOK: Lineage: date_2.d EXPRESSION []
+POSTHOOK: Lineage: date_2.d EXPRESSION []
+2011-01-01
+PREHOOK: query: drop table date_2
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@date_2
+PREHOOK: Output: default@date_2
+POSTHOOK: query: drop table date_2
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@date_2
+POSTHOOK: Output: default@date_2
+POSTHOOK: Lineage: date_2.d EXPRESSION []
+POSTHOOK: Lineage: date_2.d EXPRESSION []
Index: ql/src/test/results/clientpositive/date_udf.q.out
===================================================================
--- ql/src/test/results/clientpositive/date_udf.q.out (revision 0)
+++ ql/src/test/results/clientpositive/date_udf.q.out (revision 0)
@@ -0,0 +1,150 @@
+PREHOOK: query: drop table date_udf
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table date_udf
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: drop table date_udf_string
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table date_udf_string
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table date_udf (d date)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table date_udf (d date)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@date_udf
+PREHOOK: query: create table date_udf_string (d string)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table date_udf_string (d string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@date_udf_string
+PREHOOK: query: from src
+  insert overwrite table date_udf
+    select '2011-05-06' limit 1
+  insert overwrite table date_udf_string
+    select '2011-05-06' limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@date_udf
+PREHOOK: Output: default@date_udf_string
+POSTHOOK: query: from src
+  insert overwrite table date_udf
+    select '2011-05-06' limit 1
+  insert overwrite table date_udf_string
+    select '2011-05-06' limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@date_udf
+POSTHOOK: Output: default@date_udf_string
+POSTHOOK: Lineage: date_udf.d EXPRESSION []
+POSTHOOK: Lineage: date_udf_string.d SIMPLE []
+PREHOOK: query: -- Test UDFs with date input
+select unix_timestamp(d), year(d), month(d), day(d), dayofmonth(d),
+    weekofyear(d), to_date(d)
+  from date_udf
+PREHOOK: type: QUERY
+PREHOOK: Input: default@date_udf
+#### A masked pattern was here ####
+POSTHOOK: query: -- Test UDFs with date input
+select unix_timestamp(d), year(d), month(d), day(d), dayofmonth(d),
+    weekofyear(d), to_date(d)
+  from date_udf
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@date_udf
+#### A masked pattern was here ####
+POSTHOOK: Lineage: date_udf.d EXPRESSION []
+POSTHOOK: Lineage: date_udf_string.d SIMPLE []
+1304665200 2011 5 6 6 18 2011-05-06
+PREHOOK: query: select date_add(d, 5), date_sub(d, 10)
+  from date_udf
+PREHOOK: type: QUERY
+PREHOOK: Input: default@date_udf
+#### A masked pattern was here ####
+POSTHOOK: query: select date_add(d, 5), date_sub(d, 10)
+  from date_udf
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@date_udf
+#### A masked pattern was here ####
+POSTHOOK: Lineage: date_udf.d EXPRESSION []
+POSTHOOK: Lineage: date_udf_string.d SIMPLE []
+2011-05-11 2011-04-26
+PREHOOK: query: select datediff(d, d), datediff(d, '2002-03-21'), datediff('2002-03-21', d),
+    datediff(cast ('2002-03-21 00:00:00' as timestamp), d),
+    datediff(d, cast ('2002-03-21 00:00:00' as timestamp))
+  from date_udf
+PREHOOK: type: QUERY
+PREHOOK: Input: default@date_udf
+#### A masked pattern was here ####
+POSTHOOK: query: select datediff(d, d), datediff(d, '2002-03-21'), datediff('2002-03-21', d),
+    datediff(cast ('2002-03-21 00:00:00' as timestamp), d),
+    datediff(d, cast ('2002-03-21 00:00:00' as timestamp))
+  from date_udf
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@date_udf
+#### A masked pattern was here ####
+POSTHOOK: Lineage: date_udf.d EXPRESSION []
+POSTHOOK: Lineage: date_udf_string.d SIMPLE []
+0 3333 -3333 -3332 3332
+PREHOOK: query: -- Test UDFs with string input
+select unix_timestamp(d), year(d), month(d), day(d), dayofmonth(d),
+    weekofyear(d), to_date(d)
+  from date_udf_string
+PREHOOK: type: QUERY
+PREHOOK: Input: default@date_udf_string
+#### A masked pattern was here ####
+POSTHOOK: query: -- Test UDFs with string input
+select unix_timestamp(d), year(d), month(d), day(d), dayofmonth(d),
+    weekofyear(d), to_date(d)
+  from date_udf_string
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@date_udf_string
+#### A masked pattern was here ####
+POSTHOOK: Lineage: date_udf.d EXPRESSION []
+POSTHOOK: Lineage: date_udf_string.d SIMPLE []
+NULL 2011 5 6 6 18 2011-05-06
+PREHOOK: query: select date_add(d, 5), date_sub(d, 10) from date_udf_string
+PREHOOK: type: QUERY
+PREHOOK: Input: default@date_udf_string
+#### A masked pattern was here ####
+POSTHOOK: query: select date_add(d, 5), date_sub(d, 10) from date_udf_string
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@date_udf_string
+#### A masked pattern was here ####
+POSTHOOK: Lineage: date_udf.d EXPRESSION []
+POSTHOOK: Lineage: date_udf_string.d SIMPLE []
+2011-05-11 2011-04-26
+PREHOOK: query: select datediff(d, d), datediff(d, '2002-03-21'), datediff('2002-03-21', d),
+    datediff('2002-03-21 00:00:00', d),
+    datediff(d, '2002-03-21 00:00:00')
+  from date_udf_string
+PREHOOK: type: QUERY
+PREHOOK: Input: default@date_udf_string
+#### A masked pattern was here ####
+POSTHOOK: query: select datediff(d, d), datediff(d, '2002-03-21'), datediff('2002-03-21', d),
+    datediff('2002-03-21 00:00:00', d),
+    datediff(d, '2002-03-21 00:00:00')
+  from date_udf_string
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@date_udf_string
+#### A masked pattern was here ####
+POSTHOOK: Lineage: date_udf.d EXPRESSION []
+POSTHOOK: Lineage: date_udf_string.d SIMPLE []
+0 3333 -3333 -3333 3333
+PREHOOK: query: drop table date_udf
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@date_udf
+PREHOOK: Output: default@date_udf
+POSTHOOK: query: drop table date_udf
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@date_udf
+POSTHOOK: Output: default@date_udf
+POSTHOOK: Lineage: date_udf.d EXPRESSION []
+POSTHOOK: Lineage: date_udf_string.d SIMPLE []
+PREHOOK: query: drop table date_udf_string
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@date_udf_string
+PREHOOK: Output: default@date_udf_string
+POSTHOOK: query: drop table date_udf_string
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@date_udf_string
+POSTHOOK: Output: default@date_udf_string
+POSTHOOK: Lineage: date_udf.d EXPRESSION []
+POSTHOOK: Lineage: date_udf_string.d SIMPLE []
Index: ql/src/test/results/clientpositive/date_comparison.q.out
===================================================================
--- ql/src/test/results/clientpositive/date_comparison.q.out (revision 0)
+++ ql/src/test/results/clientpositive/date_comparison.q.out (revision 0)
@@ -0,0 +1,77 @@
+PREHOOK: query: select cast('2011-05-06' as timestamp) >
+  cast('2011-05-06' as timestamp) from src limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: select cast('2011-05-06' as timestamp) >
+  cast('2011-05-06' as timestamp) from src limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+false
+PREHOOK: query: select cast('2011-05-06' as timestamp) <
+  cast('2011-05-06' as timestamp) from src limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: select cast('2011-05-06' as timestamp) <
+  cast('2011-05-06' as timestamp) from src limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+false
+PREHOOK: query: select cast('2011-05-06' as timestamp) =
+  cast('2011-05-06' as timestamp) from src limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: select cast('2011-05-06' as timestamp) =
+  cast('2011-05-06' as timestamp) from src limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+true
+PREHOOK: query: select cast('2011-05-06' as timestamp) <>
+  cast('2011-05-06' as timestamp) from src limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: select cast('2011-05-06' as timestamp) <>
+  cast('2011-05-06' as timestamp) from src limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+false
+PREHOOK: query: select cast('2011-05-06' as timestamp) >=
+  cast('2011-05-06' as timestamp) from src limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: select cast('2011-05-06' as timestamp) >=
+  cast('2011-05-06' as timestamp) from src limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+true
+PREHOOK: query: select cast('2011-05-06' as timestamp) <=
+  cast('2011-05-06' as timestamp) from src limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: select cast('2011-05-06' as timestamp) <=
+  cast('2011-05-06' as timestamp) from src limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+true
+PREHOOK: query: select cast('2011-05-06' as timestamp) >=
+  cast('2011-05-06' as timestamp) from src limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: select cast('2011-05-06' as timestamp) >=
+  cast('2011-05-06' as timestamp) from src limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+true
Index: ql/src/test/results/clientpositive/date_1.q.out
===================================================================
--- ql/src/test/results/clientpositive/date_1.q.out (revision 0)
+++ ql/src/test/results/clientpositive/date_1.q.out (revision 0)
@@ -0,0 +1,218 @@
+PREHOOK: query: drop table date_1
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table date_1
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table date_1 (d date)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table date_1 (d date)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@date_1
+PREHOOK: query: alter table date_1 set serde 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
+PREHOOK: type: ALTERTABLE_SERIALIZER
+PREHOOK: Input: default@date_1
+PREHOOK: Output: default@date_1
+POSTHOOK: query: alter table date_1 set serde 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
+POSTHOOK: type: ALTERTABLE_SERIALIZER
+POSTHOOK: Input: default@date_1
+POSTHOOK: Output: default@date_1
+PREHOOK: query: insert overwrite table date_1
+  select cast('2011-01-01' as date) from src limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@date_1
+POSTHOOK: query: insert overwrite table date_1
+  select cast('2011-01-01' as date) from src limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@date_1
+POSTHOOK: Lineage: date_1.d EXPRESSION []
+PREHOOK: query: select cast(d as boolean) from date_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@date_1
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(d as boolean) from date_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@date_1
+#### A masked pattern was here ####
+POSTHOOK: Lineage: date_1.d EXPRESSION []
+true
+PREHOOK: query: select cast(d as tinyint) from date_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@date_1
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(d as tinyint) from date_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@date_1
+#### A masked pattern was here ####
+POSTHOOK: Lineage: date_1.d EXPRESSION []
+0
+PREHOOK: query: select cast(d as smallint) from date_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@date_1
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(d as smallint) from date_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@date_1
+#### A masked pattern was here ####
+POSTHOOK: Lineage: date_1.d EXPRESSION []
+-8448
+PREHOOK: query: select cast(d as int) from date_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@date_1
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(d as int) from date_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@date_1
+#### A masked pattern was here ####
+POSTHOOK: Lineage: date_1.d EXPRESSION []
+1293868800
+PREHOOK: query: select cast(d as bigint) from date_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@date_1
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(d as bigint) from date_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@date_1
+#### A masked pattern was here ####
+POSTHOOK: Lineage: date_1.d EXPRESSION []
+1293868800
+PREHOOK: query: select cast(d as float) from date_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@date_1
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(d as float) from date_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@date_1
+#### A masked pattern was here ####
+POSTHOOK: Lineage: date_1.d EXPRESSION []
+1.2938688E9
+PREHOOK: query: select cast(d as double) from date_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@date_1
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(d as double) from date_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@date_1
+#### A masked pattern was here ####
+POSTHOOK: Lineage: date_1.d EXPRESSION []
+1.2938688E9
+PREHOOK: query: select cast(d as string) from date_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@date_1
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(d as string) from date_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@date_1
+#### A masked pattern was here ####
+POSTHOOK: Lineage: date_1.d EXPRESSION []
+2011-01-01
+PREHOOK: query: insert overwrite table date_1
+  select '2011-01-01' from src limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@date_1
+POSTHOOK: query: insert overwrite table date_1
+  select '2011-01-01' from src limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@date_1
+POSTHOOK: Lineage: date_1.d EXPRESSION []
+POSTHOOK: Lineage: date_1.d EXPRESSION []
+PREHOOK: query: select cast(d as boolean) from date_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@date_1
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(d as boolean) from date_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@date_1
+#### A masked pattern was here ####
+POSTHOOK: Lineage: date_1.d EXPRESSION []
+POSTHOOK: Lineage: date_1.d EXPRESSION []
+true
+PREHOOK: query: select cast(d as tinyint) from date_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@date_1
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(d as tinyint) from date_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@date_1
+#### A masked pattern was here ####
+POSTHOOK: Lineage: date_1.d EXPRESSION []
+POSTHOOK: Lineage: date_1.d EXPRESSION []
+0
+PREHOOK: query: select cast(d as smallint) from date_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@date_1
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(d as smallint) from date_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@date_1
+#### A masked pattern was here ####
+POSTHOOK: Lineage: date_1.d EXPRESSION []
+POSTHOOK: Lineage: date_1.d EXPRESSION []
+-8448
+PREHOOK: query: select cast(d as int) from date_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@date_1
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(d as int) from date_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@date_1
+#### A masked pattern was here ####
+POSTHOOK: Lineage: date_1.d EXPRESSION []
+POSTHOOK: Lineage: date_1.d EXPRESSION []
+1293868800
+PREHOOK: query: select cast(d as bigint) from date_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@date_1
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(d as bigint) from date_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@date_1
+#### A masked pattern was here ####
+POSTHOOK: Lineage: date_1.d EXPRESSION []
+POSTHOOK: Lineage: date_1.d EXPRESSION []
+1293868800
+PREHOOK: query: select cast(d as float) from date_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@date_1
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(d as float) from date_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@date_1
+#### A masked pattern was here ####
+POSTHOOK: Lineage: date_1.d EXPRESSION []
+POSTHOOK: Lineage: date_1.d EXPRESSION []
+1.2938688E9
+PREHOOK: query: select cast(d as double) from date_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@date_1
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(d as double) from date_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@date_1
+#### A masked pattern was here ####
+POSTHOOK: Lineage: date_1.d EXPRESSION []
+POSTHOOK: Lineage: date_1.d EXPRESSION []
+1.2938688E9
+PREHOOK: query: select cast(d as string) from date_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@date_1
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(d as string) from date_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@date_1
+#### A masked pattern was here ####
+POSTHOOK: Lineage: date_1.d EXPRESSION []
+POSTHOOK: Lineage: date_1.d EXPRESSION []
+2011-01-01
+PREHOOK: query: drop table date_1
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@date_1
+PREHOOK: Output: default@date_1
+POSTHOOK: query: drop table date_1
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@date_1
+POSTHOOK: Output: default@date_1
+POSTHOOK: Lineage: date_1.d EXPRESSION []
+POSTHOOK: Lineage: date_1.d EXPRESSION []
Index: ql/src/test/results/clientpositive/date_3.q.out
===================================================================
--- ql/src/test/results/clientpositive/date_3.q.out (revision 0)
+++ ql/src/test/results/clientpositive/date_3.q.out (revision 0)
@@ -0,0 +1,127 @@
+PREHOOK: query: drop table date_3
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table date_3
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table date_3 (d date)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table date_3 (d date)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@date_3
+PREHOOK: query: alter table date_3 set serde 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
+PREHOOK: type: ALTERTABLE_SERIALIZER
+PREHOOK: Input: default@date_3
+PREHOOK: Output: default@date_3
+POSTHOOK: query: alter table date_3 set serde 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
+POSTHOOK: type: ALTERTABLE_SERIALIZER
+POSTHOOK: Input: default@date_3
+POSTHOOK: Output: default@date_3
+PREHOOK: query: insert overwrite table date_3
+  select cast(cast('1.3041352164485E9' as double) as date) from src limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@date_3
+POSTHOOK: query: insert overwrite table date_3
+  select cast(cast('1.3041352164485E9' as double) as date) from src limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@date_3
+POSTHOOK: Lineage: date_3.d EXPRESSION []
+PREHOOK: query: select cast(d as boolean) from date_3 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@date_3
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(d as boolean) from date_3 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@date_3
+#### A masked pattern was here ####
+POSTHOOK: Lineage: date_3.d EXPRESSION []
+true
+PREHOOK: query: select cast(d as tinyint) from date_3 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@date_3
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(d as tinyint) from date_3 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@date_3
+#### A masked pattern was here ####
+POSTHOOK: Lineage: date_3.d EXPRESSION []
+-16
+PREHOOK: query: select cast(d as smallint) from date_3 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@date_3
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(d as smallint) from date_3 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@date_3
+#### A masked pattern was here ####
+POSTHOOK: Lineage: date_3.d EXPRESSION []
+25072
+PREHOOK: query: select cast(d as int) from date_3 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@date_3
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(d as int) from date_3 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@date_3
+#### A masked pattern was here ####
+POSTHOOK: Lineage: date_3.d EXPRESSION []
+1304060400
+PREHOOK: query: select cast(d as bigint) from date_3 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@date_3
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(d as bigint) from date_3 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@date_3
+#### A masked pattern was here ####
+POSTHOOK: Lineage: date_3.d EXPRESSION []
+1304060400
+PREHOOK: query: select cast(d as float) from date_3 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@date_3
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(d as float) from date_3 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@date_3
+#### A masked pattern was here ####
+POSTHOOK: Lineage: date_3.d EXPRESSION []
+1.30406042E9
+PREHOOK: query: select cast(d as double) from date_3 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@date_3
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(d as double) from date_3 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@date_3
+#### A masked pattern was here ####
+POSTHOOK: Lineage: date_3.d EXPRESSION []
+1.3040604E9
+PREHOOK: query: select cast(d as string) from date_3 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@date_3
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(d as string) from date_3 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@date_3
+#### A masked pattern was here ####
+POSTHOOK: Lineage: date_3.d EXPRESSION []
+2011-04-29
+PREHOOK: query: select cast(d as timestamp) from date_3 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@date_3
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(d as timestamp) from date_3 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@date_3
+#### A masked pattern was here ####
+POSTHOOK: Lineage: date_3.d EXPRESSION []
+2011-04-29 00:00:00
+PREHOOK: query: drop table date_3
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@date_3
+PREHOOK: Output: default@date_3
+POSTHOOK: query: drop table date_3
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@date_3
+POSTHOOK: Output: default@date_3
+POSTHOOK: Lineage: date_3.d EXPRESSION []
Index: ql/src/test/org/apache/hadoop/hive/ql/exec/TestFunctionRegistry.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/exec/TestFunctionRegistry.java (revision 1447989)
+++ ql/src/test/org/apache/hadoop/hive/ql/exec/TestFunctionRegistry.java (working copy)
@@ -31,6 +31,7 @@
 import org.apache.hadoop.hive.serde2.io.BigDecimalWritable;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 
 public class TestFunctionRegistry extends TestCase {
@@ -41,6 +42,7 @@
     public void one(IntWritable x, BigDecimalWritable y) {}
     public void one(IntWritable x, DoubleWritable y) {}
     public void one(IntWritable x, IntWritable y) {}
+    public void mismatch(DateWritable x, BigDecimalWritable y) {}
     public void mismatch(TimestampWritable x, BigDecimalWritable y) {}
     public void mismatch(BytesWritable x, DoubleWritable y) {}
   }
@@ -58,6 +60,7 @@
     implicit(TypeInfoFactory.floatTypeInfo, TypeInfoFactory.decimalTypeInfo, true);
     implicit(TypeInfoFactory.doubleTypeInfo, TypeInfoFactory.decimalTypeInfo, true);
     implicit(TypeInfoFactory.stringTypeInfo, TypeInfoFactory.decimalTypeInfo, true);
+    implicit(TypeInfoFactory.dateTypeInfo, TypeInfoFactory.decimalTypeInfo, false);
     implicit(TypeInfoFactory.timestampTypeInfo, TypeInfoFactory.decimalTypeInfo, false);
   }
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (revision 1447989)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (working copy)
@@ -153,8 +153,8 @@
   }
 
   public static String getTypeName(int token) throws SemanticException {
-    // date and datetime types aren't currently supported
-    if (token == HiveParser.TOK_DATE || token == HiveParser.TOK_DATETIME) {
+    // datetime type isn't currently supported
+    if (token == HiveParser.TOK_DATETIME) {
       throw new SemanticException(ErrorMsg.UNSUPPORTED_TYPE.getMsg());
     }
     return TokenToTypeName.get(token);
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java (revision 1447989)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java (working copy)
@@ -499,6 +499,8 @@
         serdeConstants.STRING_TYPE_NAME);
     conversionFunctionTextHashMap.put(HiveParser.TOK_BINARY,
         serdeConstants.BINARY_TYPE_NAME);
+    conversionFunctionTextHashMap.put(HiveParser.TOK_DATE,
+        serdeConstants.DATE_TYPE_NAME);
     conversionFunctionTextHashMap.put(HiveParser.TOK_TIMESTAMP,
         serdeConstants.TIMESTAMP_TYPE_NAME);
     conversionFunctionTextHashMap.put(HiveParser.TOK_DECIMAL,
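The two parser-side hunks above work as a pair: DDLSemanticAnalyzer.getTypeName() stops rejecting TOK_DATE, and TypeCheckProcFactory maps TOK_DATE to the type name "date", so CAST (expr AS DATE) is rewritten into a call to whatever function is registered under that name (GenericUDFToDate, registered in the FunctionRegistry hunk below). A simplified sketch of that resolution chain, with stand-in tokens and plain maps rather than the real Hive classes:

```java
// Simplified sketch, not the real Hive classes: shows how the TOK_DATE ->
// "date" entry connects with the "date" registration in FunctionRegistry,
// making CAST (x AS DATE) resolve to GenericUDFToDate.
import java.util.HashMap;
import java.util.Map;

public class CastResolutionSketch {
  static final int TOK_DATE = 101;  // hypothetical token id, stands in for HiveParser.TOK_DATE
  static final Map<Integer, String> conversionFunctionText = new HashMap<>();
  static final Map<String, String> functionRegistry = new HashMap<>();

  public static void main(String[] args) {
    conversionFunctionText.put(TOK_DATE, "date");      // the TypeCheckProcFactory hunk
    functionRegistry.put("date", "GenericUDFToDate");  // the FunctionRegistry hunk
    String fn = conversionFunctionText.get(TOK_DATE);  // CAST (x AS DATE) -> "date"
    System.out.println(fn + " -> " + functionRegistry.get(fn));
  }
}
```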
Index: ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java (revision 1447989)
+++ ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java (working copy)
@@ -168,7 +168,7 @@
   DYNAMIC_PARTITION_STRICT_MODE(10096, "Dynamic partition strict mode requires at least one "
       + "static partition column. To turn this off set hive.exec.dynamic.partition.mode=nonstrict"),
   NONEXISTPARTCOL(10098, "Non-Partition column appears in the partition specification: "),
-  UNSUPPORTED_TYPE(10099, "DATE and DATETIME types aren't supported yet. Please use "
+  UNSUPPORTED_TYPE(10099, "DATETIME type isn't supported yet. Please use "
       + "TIMESTAMP instead"),
   CREATE_NON_NATIVE_AS(10100, "CREATE TABLE AS SELECT cannot be used for a non-native table"),
   LOAD_INTO_NON_NATIVE(10101, "A non-native table cannot be used as target for LOAD"),
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java (revision 1447989)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java (working copy)
@@ -200,6 +200,7 @@
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFStruct;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFTimestamp;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToBinary;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToDate;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToUnixTimeStamp;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToDecimal;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToUtcTimestamp;
@@ -401,6 +402,8 @@
     registerUDF(serdeConstants.STRING_TYPE_NAME, UDFToString.class, false,
         UDFToString.class.getSimpleName());
 
+    registerGenericUDF(serdeConstants.DATE_TYPE_NAME,
+        GenericUDFToDate.class);
     registerGenericUDF(serdeConstants.TIMESTAMP_TYPE_NAME,
         GenericUDFTimestamp.class);
     registerGenericUDF(serdeConstants.BINARY_TYPE_NAME,
@@ -734,6 +737,11 @@
     if (from.equals(TypeInfoFactory.voidTypeInfo)) {
       return true;
     }
+    // Allow implicit Date to String conversion
+    if (from.equals(TypeInfoFactory.dateTypeInfo)
+        && to.equals(TypeInfoFactory.stringTypeInfo)) {
+      return true;
+    }
 
     if (from.equals(TypeInfoFactory.timestampTypeInfo)
         && to.equals(TypeInfoFactory.stringTypeInfo)) {
@@ -1260,7 +1268,8 @@
         udfClass == UDFToDouble.class || udfClass == UDFToFloat.class ||
         udfClass == UDFToInteger.class || udfClass == UDFToLong.class ||
         udfClass == UDFToShort.class || udfClass == UDFToString.class ||
-        udfClass == GenericUDFTimestamp.class || udfClass == GenericUDFToBinary.class;
+        udfClass == GenericUDFTimestamp.class || udfClass == GenericUDFToBinary.class ||
+        udfClass == GenericUDFToDate.class;
   }
 
   /**
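Besides registering the cast, the FunctionRegistry hunk above widens the implicit-conversion rules: DATE converts implicitly to STRING (mirroring the existing TIMESTAMP rule) but not to DECIMAL, which is what the new TestFunctionRegistry case asserts. A standalone sketch of the rule, not part of the patch; the method name is hypothetical, standing in for the check inside FunctionRegistry's implicit-conversion logic:

```java
// Not part of the patch: the shape of the new implicit-conversion rule.
// The method name is hypothetical. DATE widens to STRING but not to
// DECIMAL -- exactly what the TestFunctionRegistry hunk asserts.
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class ImplicitDateConversionSketch {
  static boolean dateConvertsImplicitlyTo(TypeInfo to) {
    return to.equals(TypeInfoFactory.stringTypeInfo);  // date -> string only
  }

  public static void main(String[] args) {
    System.out.println(dateConvertsImplicitlyTo(TypeInfoFactory.stringTypeInfo));  // true
    System.out.println(dateConvertsImplicitlyTo(TypeInfoFactory.decimalTypeInfo)); // false
  }
}
```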
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFUnixTimeStamp.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFUnixTimeStamp.java (revision 1447989)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFUnixTimeStamp.java (working copy)
@@ -24,6 +24,7 @@
 
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
@@ -106,6 +107,15 @@
     return evaluate(dateText);
   }
 
+  public LongWritable evaluate(DateWritable d) {
+    if (d == null) {
+      return null;
+    } else {
+      result.set(d.getTimeInSeconds());
+      return result;
+    }
+  }
+
   public LongWritable evaluate(TimestampWritable i) {
     if (i == null) {
       return null;
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java (revision 1447989)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java (working copy)
@@ -24,6 +24,7 @@
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.lazy.LazyInteger;
 import org.apache.hadoop.hive.serde2.lazy.LazyLong;
@@ -133,6 +134,15 @@
     return i;
   }
 
+  public Text evaluate(DateWritable d) {
+    if (d == null) {
+      return null;
+    } else {
+      t.set(d.toString());
+      return t;
+    }
+  }
+
   public Text evaluate(TimestampWritable i) {
     if (i == null) {
       return null;
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMonth.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMonth.java (revision 1447989)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMonth.java (working copy)
@@ -25,6 +25,7 @@
 
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.Text;
@@ -69,6 +70,16 @@
     }
   }
 
+  public IntWritable evaluate(DateWritable d) {
+    if (d == null) {
+      return null;
+    }
+
+    calendar.setTime(d.get());
+    result.set(1 + calendar.get(Calendar.MONTH));
+    return result;
+  }
+
   public IntWritable evaluate(TimestampWritable t) {
     if (t == null) {
       return null;
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFYear.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFYear.java (revision 1447989)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFYear.java (working copy)
@@ -25,6 +25,7 @@
 
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.Text;
@@ -73,6 +74,16 @@
     }
   }
 
+  public IntWritable evaluate(DateWritable d) {
+    if (d == null) {
+      return null;
+    }
+
+    calendar.setTime(d.get());
+    result.set(calendar.get(Calendar.YEAR));
+    return result;
+  }
+
   public IntWritable evaluate(TimestampWritable t) {
     if (t == null) {
       return null;
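The evaluate(DateWritable) overloads above (and in UDFDayOfMonth below) all reuse the UDF's Calendar field: set it from the date's java.sql.Date, then read the field out. A self-contained sketch of the same pattern, not part of the patch; note that Calendar.MONTH is zero-based, which is why UDFMonth returns 1 + calendar.get(Calendar.MONTH):

```java
// Not part of the patch: the Calendar pattern shared by the new
// evaluate(DateWritable) overloads. Calendar.MONTH is zero-based.
import java.util.Calendar;

public class DateFieldSketch {
  public static void main(String[] args) {
    Calendar calendar = Calendar.getInstance();
    calendar.setTime(java.sql.Date.valueOf("2011-05-06")); // what DateWritable.get() yields
    System.out.println(calendar.get(Calendar.YEAR));         // 2011
    System.out.println(1 + calendar.get(Calendar.MONTH));    // 5
    System.out.println(calendar.get(Calendar.DAY_OF_MONTH)); // 6
  }
}
```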
calendar.setTime(d.get()); + result.set(calendar.get(Calendar.DAY_OF_MONTH)); + return result; + } + public IntWritable evaluate(TimestampWritable t) { if (t == null) { return null; Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDate.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDate.java (revision 1447989) +++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDate.java (working copy) @@ -24,6 +24,7 @@ import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDF; +import org.apache.hadoop.hive.serde2.io.DateWritable; import org.apache.hadoop.hive.serde2.io.TimestampWritable; import org.apache.hadoop.io.Text; @@ -67,6 +68,15 @@ } } + public Text evaluate(DateWritable d) { + if (d == null) { + return null; + } + + t.set(formatter.format(d.get())); + return t; + } + public Text evaluate(TimestampWritable i) { if (i == null) { return null; Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDouble.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDouble.java (revision 1447989) +++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDouble.java (working copy) @@ -23,6 +23,7 @@ import org.apache.hadoop.hive.serde2.io.ByteWritable; import org.apache.hadoop.hive.serde2.io.DoubleWritable; import org.apache.hadoop.hive.serde2.io.ShortWritable; +import org.apache.hadoop.hive.serde2.io.DateWritable; import org.apache.hadoop.hive.serde2.io.TimestampWritable; import org.apache.hadoop.io.BooleanWritable; import org.apache.hadoop.io.FloatWritable; @@ -169,6 +170,15 @@ } } + public DoubleWritable evaluate(DateWritable d) { + if (d == null) { + return null; + } else { + doubleWritable.set((double) d.getTimeInSeconds()); + return doubleWritable; + } + } + public DoubleWritable evaluate(TimestampWritable i) { if (i == null) { return null; Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateSub.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateSub.java (revision 1447989) +++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateSub.java (working copy) @@ -25,6 +25,7 @@ import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDF; +import org.apache.hadoop.hive.serde2.io.DateWritable; import org.apache.hadoop.hive.serde2.io.TimestampWritable; import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.Text; @@ -82,6 +83,17 @@ } } + public Text evaluate(DateWritable d, IntWritable days) { + if (d == null || days == null) { + return null; + } + calendar.setTime(d.get()); + calendar.add(Calendar.DAY_OF_MONTH, -days.get()); + Date newDate = calendar.getTime(); + result.set(formatter.format(newDate)); + return result; + } + public Text evaluate(TimestampWritable t, IntWritable days) { if (t == null || days == null) { return null; Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToUnixTimeStamp.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToUnixTimeStamp.java (revision 1447989) +++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToUnixTimeStamp.java (working copy) @@ -28,6 +28,7 @@ import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; +import 
org.apache.hadoop.hive.serde2.objectinspector.primitive.DateObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector; @@ -42,6 +43,7 @@ public class GenericUDFToUnixTimeStamp extends GenericUDF { private StringObjectInspector intputTextOI; + private DateObjectInspector inputDateOI; private TimestampObjectInspector inputTimestampOI; private StringObjectInspector patternOI; @@ -69,6 +71,8 @@ } patternOI = (StringObjectInspector) arguments[1]; } + } else if (arguments[0] instanceof DateObjectInspector) { + inputDateOI = (DateObjectInspector) arguments[0]; } else if (arguments[0] instanceof TimestampObjectInspector) { inputTimestampOI = (TimestampObjectInspector) arguments[0]; } else { @@ -106,7 +110,11 @@ } catch (ParseException e) { return null; } - } + } else if (inputDateOI != null) { + retValue.set(inputDateOI.getPrimitiveWritableObject(arguments[0].get()) + .getTimeInSeconds()); + return retValue; + } Timestamp timestamp = inputTimestampOI.getPrimitiveJavaObject(arguments[0].get()); retValue.set(timestamp.getTime() / 1000); return retValue; Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCovarianceSample.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCovarianceSample.java (revision 1447989) +++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCovarianceSample.java (working copy) @@ -66,6 +66,7 @@ case LONG: case FLOAT: case DOUBLE: + case DATE: case TIMESTAMP: case DECIMAL: switch (((PrimitiveTypeInfo) parameters[1]).getPrimitiveCategory()) { @@ -75,6 +76,7 @@ case LONG: case FLOAT: case DOUBLE: + case DATE: case TIMESTAMP: case DECIMAL: return new GenericUDAFCovarianceSampleEvaluator(); Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFHistogramNumeric.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFHistogramNumeric.java (revision 1447989) +++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFHistogramNumeric.java (working copy) @@ -86,6 +86,7 @@ case LONG: case FLOAT: case DOUBLE: + case DATE: case TIMESTAMP: case DECIMAL: break; Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFVarianceSample.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFVarianceSample.java (revision 1447989) +++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFVarianceSample.java (working copy) @@ -55,6 +55,7 @@ case FLOAT: case DOUBLE: case STRING: + case DATE: case TIMESTAMP: case DECIMAL: return new GenericUDAFVarianceSampleEvaluator(); Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFStdSample.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFStdSample.java (revision 1447989) +++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFStdSample.java (working copy) @@ -54,6 +54,7 @@ case FLOAT: case DOUBLE: case STRING: + case DATE: case TIMESTAMP: case DECIMAL: return new GenericUDAFStdSampleEvaluator(); Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFAverage.java 
=================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFAverage.java (revision 1447989) +++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFAverage.java (working copy) @@ -70,6 +70,7 @@ case FLOAT: case DOUBLE: case STRING: + case DATE: case TIMESTAMP: return new GenericUDAFAverageEvaluator(); case BOOLEAN: Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCorrelation.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCorrelation.java (revision 1447989) +++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCorrelation.java (working copy) @@ -101,6 +101,7 @@ case LONG: case FLOAT: case DOUBLE: + case DATE: case TIMESTAMP: case DECIMAL: switch (((PrimitiveTypeInfo) parameters[1]).getPrimitiveCategory()) { @@ -110,6 +111,7 @@ case LONG: case FLOAT: case DOUBLE: + case DATE: case TIMESTAMP: case DECIMAL: return new GenericUDAFCorrelationEvaluator(); Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSum.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSum.java (revision 1447989) +++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSum.java (working copy) @@ -63,6 +63,7 @@ case SHORT: case INT: case LONG: + case DATE: case TIMESTAMP: return new GenericUDAFSumLong(); case FLOAT: Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCovariance.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCovariance.java (revision 1447989) +++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCovariance.java (working copy) @@ -92,6 +92,7 @@ case LONG: case FLOAT: case DOUBLE: + case DATE: case TIMESTAMP: case DECIMAL: switch (((PrimitiveTypeInfo) parameters[1]).getPrimitiveCategory()) { @@ -101,6 +102,7 @@ case LONG: case FLOAT: case DOUBLE: + case DATE: case TIMESTAMP: case DECIMAL: return new GenericUDAFCovarianceEvaluator(); Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToDate.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToDate.java (revision 0) +++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToDate.java (revision 0) @@ -0,0 +1,82 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter.DateConverter;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+
+/**
+ *
+ * GenericUDFToDate
+ *
+ * Example usage:
+ * ... CAST(&lt;Date string&gt; as DATE) ...
+ *
+ * Creates a DateWritable object using PrimitiveObjectInspectorConverter
+ *
+ */
+public class GenericUDFToDate extends GenericUDF {
+
+  private PrimitiveObjectInspector argumentOI;
+  private DateConverter dc;
+
+  @Override
+  public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
+    if (arguments.length < 1) {
+      throw new UDFArgumentLengthException(
+          "The function CAST as DATE requires at least one argument, got "
+          + arguments.length);
+    }
+    try {
+      argumentOI = (PrimitiveObjectInspector) arguments[0];
+    } catch (ClassCastException e) {
+      throw new UDFArgumentException(
+          "The function CAST as DATE takes only primitive types");
+    }
+
+    dc = new DateConverter(argumentOI,
+        PrimitiveObjectInspectorFactory.writableDateObjectInspector);
+    return PrimitiveObjectInspectorFactory.writableDateObjectInspector;
+  }
+
+  @Override
+  public Object evaluate(DeferredObject[] arguments) throws HiveException {
+    Object o0 = arguments[0].get();
+    if (o0 == null) {
+      return null;
+    }
+
+    return dc.convert(o0);
+  }
+
+  @Override
+  public String getDisplayString(String[] children) {
+    assert (children.length == 1);
+    StringBuilder sb = new StringBuilder();
+    sb.append("CAST( ");
+    sb.append(children[0]);
+    sb.append(" AS DATE)");
+    return sb.toString();
+  }
+
+}
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFPercentileApprox.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFPercentileApprox.java (revision 1447989)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFPercentileApprox.java (working copy)
@@ -92,6 +92,7 @@
     case LONG:
     case FLOAT:
     case DOUBLE:
+    case DATE:
     case TIMESTAMP:
     case DECIMAL:
       break;
@@ -164,6 +165,7 @@
     case SHORT:
     case INT:
     case LONG:
+    case DATE:
     case TIMESTAMP:
       break;
     default:
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFnGrams.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFnGrams.java (revision 1447989)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFnGrams.java (working copy)
@@ -113,6 +113,7 @@
     case SHORT:
     case INT:
     case LONG:
+    case DATE:
     case TIMESTAMP:
       break;
@@ -131,6 +132,7 @@
     case SHORT:
     case INT:
     case LONG:
+    case DATE:
     case TIMESTAMP:
       break;
@@ -150,6 +152,7 @@
     case SHORT:
     case INT:
     case LONG:
+    case DATE:
     case TIMESTAMP:
       break;
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFVariance.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFVariance.java (revision 1447989)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFVariance.java (working copy)
@@ -71,6 +71,7 @@
     case FLOAT:
     case DOUBLE:
     case
STRING: + case DATE: case TIMESTAMP: case DECIMAL: return new GenericUDAFVarianceEvaluator(); Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFStd.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFStd.java (revision 1447989) +++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFStd.java (working copy) @@ -55,6 +55,7 @@ case FLOAT: case DOUBLE: case STRING: + case DATE: case TIMESTAMP: case DECIMAL: return new GenericUDAFStdEvaluator(); Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFContextNGrams.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFContextNGrams.java (revision 1447989) +++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFContextNGrams.java (working copy) @@ -132,6 +132,7 @@ case SHORT: case INT: case LONG: + case DATE: case TIMESTAMP: break; @@ -152,6 +153,7 @@ case SHORT: case INT: case LONG: + case DATE: case TIMESTAMP: break; Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToInteger.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToInteger.java (revision 1447989) +++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToInteger.java (working copy) @@ -23,6 +23,7 @@ import org.apache.hadoop.hive.serde2.io.ByteWritable; import org.apache.hadoop.hive.serde2.io.DoubleWritable; import org.apache.hadoop.hive.serde2.io.ShortWritable; +import org.apache.hadoop.hive.serde2.io.DateWritable; import org.apache.hadoop.hive.serde2.io.TimestampWritable; import org.apache.hadoop.hive.serde2.lazy.LazyInteger; import org.apache.hadoop.io.BooleanWritable; @@ -174,6 +175,22 @@ } /** + * Convert from Date to an integer. This is called for CAST(... AS INT) + * + * @param d + * The Date value to convert + * @return IntWritable + */ + public IntWritable evaluate(DateWritable d) { + if (d == null) { + return null; + } else { + intWritable.set((int)d.getTimeInSeconds()); + return intWritable; + } + } + + /** * Convert from Timestamp to an integer. This is called for CAST(... 
AS INT)
   *
   * @param i
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToShort.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToShort.java (revision 1447989)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToShort.java (working copy)
@@ -23,6 +23,7 @@
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.lazy.LazyShort;
 import org.apache.hadoop.io.BooleanWritable;
@@ -173,6 +174,15 @@
     }
   }
 
+  public ShortWritable evaluate(DateWritable d) {
+    if (d == null) {
+      return null;
+    } else {
+      shortWritable.set((short) d.getTimeInSeconds());
+      return shortWritable;
+    }
+  }
+
   public ShortWritable evaluate(TimestampWritable i) {
     if (i == null) {
       return null;
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateDiff.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateDiff.java (revision 1447989)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateDiff.java (working copy)
@@ -25,6 +25,7 @@
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.Text;
@@ -79,6 +80,28 @@
     return evaluate(toDate(dateString), toDate(t));
   }
 
+  // Unwrap DateWritable defensively: a NULL date argument would otherwise
+  // hit a NullPointerException before reaching the null check below.
+  public IntWritable evaluate(Text dateString, DateWritable d) {
+    return evaluate(toDate(dateString), d == null ? null : d.get());
+  }
+
+  public IntWritable evaluate(TimestampWritable t, DateWritable d) {
+    return evaluate(toDate(t), d == null ? null : d.get());
+  }
+
+  public IntWritable evaluate(DateWritable d1, DateWritable d2) {
+    return evaluate(d1 == null ? null : d1.get(), d2 == null ? null : d2.get());
+  }
+
+  public IntWritable evaluate(DateWritable d, Text dateString) {
+    return evaluate(d == null ? null : d.get(), toDate(dateString));
+  }
+
+  public IntWritable evaluate(DateWritable d, TimestampWritable t) {
+    return evaluate(d == null ? null : d.get(), toDate(t));
+  }
+
   private IntWritable evaluate(Date date, Date date2) {
     if (date == null || date2 == null) {
       return null;
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToFloat.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToFloat.java (revision 1447989)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToFloat.java (working copy)
@@ -23,6 +23,7 @@
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.io.BooleanWritable;
 import org.apache.hadoop.io.FloatWritable;
@@ -170,6 +171,15 @@
     }
   }
 
+  public FloatWritable evaluate(DateWritable d) {
+    if (d == null) {
+      return null;
+    } else {
+      floatWritable.set((float) d.getTimeInSeconds());
+      return floatWritable;
+    }
+  }
+
   public FloatWritable evaluate(TimestampWritable i) {
     if (i == null) {
       return null;
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateAdd.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateAdd.java (revision 1447989)
+++
ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateAdd.java (working copy) @@ -25,6 +25,7 @@ import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDF; +import org.apache.hadoop.hive.serde2.io.DateWritable; import org.apache.hadoop.hive.serde2.io.TimestampWritable; import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.Text; @@ -82,6 +83,17 @@ } } + public Text evaluate(DateWritable d, IntWritable days) { + if (d == null || days == null) { + return null; + } + calendar.setTime(d.get()); + calendar.add(Calendar.DAY_OF_MONTH, days.get()); + Date newDate = calendar.getTime(); + result.set(formatter.format(newDate)); + return result; + } + public Text evaluate(TimestampWritable t, IntWritable days) { if (t == null || days == null) { return null; Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFWeekOfYear.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFWeekOfYear.java (revision 1447989) +++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFWeekOfYear.java (working copy) @@ -25,6 +25,7 @@ import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDF; +import org.apache.hadoop.hive.serde2.io.DateWritable; import org.apache.hadoop.hive.serde2.io.TimestampWritable; import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.Text; @@ -74,6 +75,16 @@ } } + public IntWritable evaluate(DateWritable d) { + if (d == null) { + return null; + } + + calendar.setTime(d.get()); + result.set(calendar.get(Calendar.WEEK_OF_YEAR)); + return result; + } + public IntWritable evaluate(TimestampWritable t) { if (t == null) { return null; Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToLong.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToLong.java (revision 1447989) +++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToLong.java (working copy) @@ -23,6 +23,7 @@ import org.apache.hadoop.hive.serde2.io.ByteWritable; import org.apache.hadoop.hive.serde2.io.DoubleWritable; import org.apache.hadoop.hive.serde2.io.ShortWritable; +import org.apache.hadoop.hive.serde2.io.DateWritable; import org.apache.hadoop.hive.serde2.io.TimestampWritable; import org.apache.hadoop.hive.serde2.lazy.LazyLong; import org.apache.hadoop.io.BooleanWritable; @@ -184,6 +185,15 @@ } } + public LongWritable evaluate(DateWritable d) { + if (d == null) { + return null; + } else { + longWritable.set(d.getTimeInSeconds()); + return longWritable; + } + } + public LongWritable evaluate(TimestampWritable i) { if (i == null) { return null; Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToByte.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToByte.java (revision 1447989) +++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToByte.java (working copy) @@ -23,6 +23,7 @@ import org.apache.hadoop.hive.serde2.io.ByteWritable; import org.apache.hadoop.hive.serde2.io.DoubleWritable; import org.apache.hadoop.hive.serde2.io.ShortWritable; +import org.apache.hadoop.hive.serde2.io.DateWritable; import org.apache.hadoop.hive.serde2.io.TimestampWritable; import org.apache.hadoop.hive.serde2.lazy.LazyByte; import org.apache.hadoop.io.BooleanWritable; @@ -173,6 +174,15 @@ } } + public ByteWritable evaluate(DateWritable d) { + if (d == null) { + return null; + } else { + byteWritable.set((byte)d.getTimeInSeconds()); + return byteWritable; + } + } + public ByteWritable 
evaluate(TimestampWritable i) { if (i == null) { return null; Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java (revision 1447989) +++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java (working copy) @@ -25,6 +25,7 @@ import org.apache.hadoop.hive.serde2.io.ByteWritable; import org.apache.hadoop.hive.serde2.io.DoubleWritable; import org.apache.hadoop.hive.serde2.io.ShortWritable; +import org.apache.hadoop.hive.serde2.io.DateWritable; import org.apache.hadoop.hive.serde2.io.TimestampWritable; import org.apache.hadoop.io.BooleanWritable; import org.apache.hadoop.io.FloatWritable; @@ -166,6 +167,15 @@ } } + public BooleanWritable evaluate(DateWritable d) { + if (d == null) { + return null; + } else { + booleanWritable.set(d.getTimeInSeconds() != 0); + return booleanWritable; + } + } + public BooleanWritable evaluate(TimestampWritable i) { if (i == null) { return null; Index: serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java =================================================================== --- serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java (revision 1447989) +++ serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java (working copy) @@ -43,6 +43,7 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.DateObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils; import org.apache.hadoop.io.BytesWritable; @@ -232,6 +233,11 @@ out.write(toWrite, 0, toWrite.length); break; } + case DATE: { + LazyDate.writeUTF8(out, + ((DateObjectInspector) oi).getPrimitiveWritableObject(o)); + break; + } case TIMESTAMP: { LazyTimestamp.writeUTF8(out, ((TimestampObjectInspector) oi).getPrimitiveWritableObject(o)); Index: serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyFactory.java =================================================================== --- serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyFactory.java (revision 1447989) +++ serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyFactory.java (working copy) @@ -37,6 +37,7 @@ import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyPrimitiveObjectInspectorFactory; import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyShortObjectInspector; import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyStringObjectInspector; +import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyDateObjectInspector; import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyTimestampObjectInspector; import org.apache.hadoop.hive.serde2.lazydio.LazyDioBoolean; import org.apache.hadoop.hive.serde2.lazydio.LazyDioByte; @@ -109,6 +110,8 @@ return new LazyDouble((LazyDoubleObjectInspector) oi); case STRING: return new LazyString((LazyStringObjectInspector) oi); + case DATE: + return new LazyDate((LazyDateObjectInspector) oi); case TIMESTAMP: return new LazyTimestamp((LazyTimestampObjectInspector) oi); case BINARY: Index: 
serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyPrimitiveObjectInspectorFactory.java
===================================================================
--- serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyPrimitiveObjectInspectorFactory.java (revision 1447989)
+++ serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyPrimitiveObjectInspectorFactory.java (working copy)
@@ -53,6 +53,8 @@
       new LazyDoubleObjectInspector();
   public static final LazyVoidObjectInspector LAZY_VOID_OBJECT_INSPECTOR =
       new LazyVoidObjectInspector();
+  public static final LazyDateObjectInspector LAZY_DATE_OBJECT_INSPECTOR =
+      new LazyDateObjectInspector();
   public static final LazyTimestampObjectInspector LAZY_TIMESTAMP_OBJECT_INSPECTOR =
       new LazyTimestampObjectInspector();
   public static final LazyBinaryObjectInspector LAZY_BINARY_OBJECT_INSPECTOR =
@@ -101,6 +103,8 @@
       return LAZY_BINARY_OBJECT_INSPECTOR;
     case VOID:
       return LAZY_VOID_OBJECT_INSPECTOR;
+    case DATE:
+      return LAZY_DATE_OBJECT_INSPECTOR;
     case TIMESTAMP:
       return LAZY_TIMESTAMP_OBJECT_INSPECTOR;
     case DECIMAL:
Index: serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyDateObjectInspector.java
===================================================================
--- serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyDateObjectInspector.java (revision 0)
+++ serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyDateObjectInspector.java (revision 0)
@@ -0,0 +1,44 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive;
+
+import java.sql.Date;
+
+import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.lazy.LazyDate;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.DateObjectInspector;
+
+public class LazyDateObjectInspector
+    extends AbstractPrimitiveLazyObjectInspector<DateWritable>
+    implements DateObjectInspector {
+
+  protected LazyDateObjectInspector() {
+    super(PrimitiveObjectInspectorUtils.dateTypeEntry);
+  }
+
+  @Override
+  public Object copyObject(Object o) {
+    return o == null ? null : new LazyDate((LazyDate) o);
+  }
+
+  @Override
+  public Date getPrimitiveJavaObject(Object o) {
+    return o == null ? null : ((LazyDate) o).getWritableObject().get();
+  }
+}
Index: serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyDate.java
===================================================================
--- serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyDate.java (revision 0)
+++ serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyDate.java (revision 0)
@@ -0,0 +1,87 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.serde2.lazy;
+
+import java.io.IOException;
+import java.io.OutputStream;
+import java.io.UnsupportedEncodingException;
+import java.nio.ByteBuffer;
+import java.sql.Date;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyDateObjectInspector;
+import org.apache.hadoop.io.Text;
+
+/**
+ *
+ * LazyDate.
+ * Serializes and deserializes a Date in the SQL date format
+ *
+ *    YYYY-MM-DD
+ *
+ */
+public class LazyDate extends LazyPrimitive<LazyDateObjectInspector, DateWritable> {
+  static final private Log LOG = LogFactory.getLog(LazyDate.class);
+
+  public LazyDate(LazyDateObjectInspector oi) {
+    super(oi);
+    data = new DateWritable();
+  }
+
+  public LazyDate(LazyDate copy) {
+    super(copy);
+    data = new DateWritable(copy.data);
+  }
+
+  /**
+   * Initializes LazyDate object by interpreting the input bytes
+   * as a SQL date string.
+   *
+   * @param bytes
+   * @param start
+   * @param length
+   */
+  @Override
+  public void init(ByteArrayRef bytes, int start, int length) {
+    String s = null;
+    try {
+      s = Text.decode(bytes.getData(), start, length);
+      data.set(Date.valueOf(s));
+    } catch (Exception e) {
+      isNull = true;
+      logExceptionMessage(bytes, start, length, "DATE");
+    }
+  }
+
+  /**
+   * Writes a Date in SQL date format to the output stream.
+   * @param out
+   *          The output stream
+   * @param d
+   *          The DateWritable to write
+   * @throws IOException
+   */
+  public static void writeUTF8(OutputStream out, DateWritable d)
+      throws IOException {
+    ByteBuffer b = Text.encode(d.toString());
+    out.write(b.array(), 0, b.limit());
+  }
+
+}
Index: serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java
===================================================================
--- serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java (revision 1447989)
+++ serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java (working copy)
@@ -40,6 +40,7 @@
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import
org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector; @@ -59,6 +60,7 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.DateObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector; import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo; @@ -368,6 +370,17 @@ return bw; } + case DATE: { + DateWritable d = reuse == null ? new DateWritable() + : (DateWritable) reuse; + long v = buffer.read(invert) ^ 0x80; + for (int i = 0; i < 7; i++) { + v = (v << 8) + (buffer.read(invert) & 0xff); + } + d.set(DateWritable.timeToDate(v)); + return d; + } + case TIMESTAMP: TimestampWritable t = (reuse == null ? new TimestampWritable() : (TimestampWritable) reuse); @@ -666,6 +679,19 @@ serializeBytes(buffer, toSer, ba.getLength(), invert); return; } + case DATE: { + DateObjectInspector doi = (DateObjectInspector) poi; + long v = doi.getPrimitiveWritableObject(o).getTimeInSeconds(); + buffer.write((byte) ((v >> 56) ^ 0x80), invert); + buffer.write((byte) (v >> 48), invert); + buffer.write((byte) (v >> 40), invert); + buffer.write((byte) (v >> 32), invert); + buffer.write((byte) (v >> 24), invert); + buffer.write((byte) (v >> 16), invert); + buffer.write((byte) (v >> 8), invert); + buffer.write((byte) v, invert); + return; + } case TIMESTAMP: { TimestampObjectInspector toi = (TimestampObjectInspector) poi; TimestampWritable t = toi.getPrimitiveWritableObject(o); Index: serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryUtils.java =================================================================== --- serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryUtils.java (revision 1447989) +++ serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryUtils.java (working copy) @@ -196,6 +196,10 @@ recordInfo.elementOffset = vInt.length; recordInfo.elementSize = vInt.value; break; + case DATE: + recordInfo.elementOffset = 0; + recordInfo.elementSize = WritableUtils.decodeVIntSize(bytes[offset]); + break; case TIMESTAMP: recordInfo.elementOffset = 0; recordInfo.elementSize = 4; Index: serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java =================================================================== --- serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java (revision 1447989) +++ serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java (working copy) @@ -34,6 +34,7 @@ import org.apache.hadoop.hive.serde2.SerDeException; import org.apache.hadoop.hive.serde2.SerDeStats; import org.apache.hadoop.hive.serde2.io.BigDecimalWritable; +import org.apache.hadoop.hive.serde2.io.DateWritable; import org.apache.hadoop.hive.serde2.io.TimestampWritable; import org.apache.hadoop.hive.serde2.lazy.ByteArrayRef; import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector; @@ -53,6 +54,7 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector; +import 
org.apache.hadoop.hive.serde2.objectinspector.primitive.DateObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; @@ -378,6 +380,11 @@ return warnedOnceNullMapKey; } + case DATE: { + LazyBinaryDate.write(byteStream, + ((DateObjectInspector) poi).getPrimitiveWritableObject(obj)); + return warnedOnceNullMapKey; + } case TIMESTAMP: { TimestampObjectInspector toi = (TimestampObjectInspector) poi; TimestampWritable t = toi.getPrimitiveWritableObject(obj); Index: serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryFactory.java =================================================================== --- serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryFactory.java (revision 1447989) +++ serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryFactory.java (working copy) @@ -37,6 +37,7 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableLongObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableShortObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableStringObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableDateObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableTimestampObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableVoidObjectInspector; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; @@ -72,6 +73,8 @@ return new LazyBinaryString((WritableStringObjectInspector) oi); case VOID: // for NULL return new LazyBinaryVoid((WritableVoidObjectInspector) oi); + case DATE: + return new LazyBinaryDate((WritableDateObjectInspector) oi); case TIMESTAMP: return new LazyBinaryTimestamp((WritableTimestampObjectInspector) oi); case BINARY: Index: serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryDate.java =================================================================== --- serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryDate.java (revision 0) +++ serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryDate.java (revision 0) @@ -0,0 +1,64 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+package org.apache.hadoop.hive.serde2.lazybinary;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.lazy.ByteArrayRef;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableDateObjectInspector;
+import org.apache.hadoop.hive.serde2.ByteStream.Output;
+import org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryUtils;
+import org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryUtils.VLong;
+
+/**
+ * LazyBinaryDate
+ * A LazyBinaryObject that encodes a java.sql.Date in a VLong.
+ *
+ */
+public class LazyBinaryDate extends
+    LazyBinaryPrimitive<WritableDateObjectInspector, DateWritable> {
+  static final Log LOG = LogFactory.getLog(LazyBinaryDate.class);
+
+  LazyBinaryDate(WritableDateObjectInspector oi) {
+    super(oi);
+    data = new DateWritable();
+  }
+
+  LazyBinaryDate(LazyBinaryDate copy) {
+    super(copy);
+    data = new DateWritable(copy.data);
+  }
+
+  /**
+   * Initializes LazyBinaryDate object.
+   * @param bytes
+   * @param start
+   * @param length
+   */
+  @Override
+  public void init(ByteArrayRef bytes, int start, int length) {
+    VLong l = new VLong();
+    LazyBinaryUtils.readVLong(bytes.getData(), start, l);
+    data.set(DateWritable.timeToDate(l.value));
+  }
+
+  public static void write(Output byteStream, DateWritable d) {
+    LazyBinaryUtils.writeVLong(byteStream, d.getTimeInSeconds());
+  }
+}
Index: serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/PrimitiveObjectInspector.java
===================================================================
--- serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/PrimitiveObjectInspector.java (revision 1447989)
+++ serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/PrimitiveObjectInspector.java (working copy)
@@ -27,7 +27,7 @@
  * The primitive types supported by Hive.
*/ public static enum PrimitiveCategory { - VOID, BOOLEAN, BYTE, SHORT, INT, LONG, FLOAT, DOUBLE, STRING, TIMESTAMP, BINARY, DECIMAL, UNKNOWN + VOID, BOOLEAN, BYTE, SHORT, INT, LONG, FLOAT, DOUBLE, STRING, DATE, TIMESTAMP, BINARY, DECIMAL, UNKNOWN }; /** Index: serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorConverters.java =================================================================== --- serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorConverters.java (revision 1447989) +++ serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorConverters.java (working copy) @@ -33,6 +33,7 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableIntObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableLongObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableShortObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableDateObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableTimestampObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.VoidObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableStringObjectInspector; @@ -99,6 +100,10 @@ return new PrimitiveObjectInspectorConverter.StringConverter( inputOI); } + case DATE: + return new PrimitiveObjectInspectorConverter.DateConverter( + inputOI, + (SettableDateObjectInspector) outputOI); case TIMESTAMP: return new PrimitiveObjectInspectorConverter.TimestampConverter( inputOI, Index: serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java =================================================================== --- serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java (revision 1447989) +++ serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java (working copy) @@ -31,6 +31,7 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.io.BigDecimalWritable; +import org.apache.hadoop.hive.serde2.io.DateWritable; import org.apache.hadoop.hive.serde2.io.TimestampWritable; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.ObjectInspectorOptions; @@ -46,6 +47,7 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; import org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.DateObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils; import org.apache.hadoop.hive.shims.ShimLoader; @@ -488,6 +490,8 @@ case BINARY: return ((BinaryObjectInspector) poi).getPrimitiveWritableObject(o).hashCode(); + case DATE: + return ((DateObjectInspector) poi).getPrimitiveWritableObject(o).hashCode(); case TIMESTAMP: TimestampWritable t = ((TimestampObjectInspector) poi) .getPrimitiveWritableObject(o); @@ -672,6 +676,13 @@ return bw1.compareTo(bw2); } + case DATE: { + DateWritable d1 = ((DateObjectInspector) poi1) + .getPrimitiveWritableObject(o1); + DateWritable d2 = ((DateObjectInspector) poi2) + 
.getPrimitiveWritableObject(o2); + return d1.compareTo(d2); + } case TIMESTAMP: { TimestampWritable t1 = ((TimestampObjectInspector) poi1) .getPrimitiveWritableObject(o1); Index: serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/SettableDateObjectInspector.java =================================================================== --- serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/SettableDateObjectInspector.java (revision 0) +++ serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/SettableDateObjectInspector.java (revision 0) @@ -0,0 +1,36 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hive.serde2.objectinspector.primitive; + +import java.sql.Date; + +import org.apache.hadoop.hive.serde2.io.DateWritable; + + +public interface SettableDateObjectInspector extends DateObjectInspector { + +// Object set(Object o, byte[] bytes, int offset); + + Object set(Object o, Date d); + + Object set(Object o, DateWritable d); + +// Object create(byte[] bytes, int offset); + + Object create (Date d); +} Index: serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/DateObjectInspector.java =================================================================== --- serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/DateObjectInspector.java (revision 0) +++ serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/DateObjectInspector.java (revision 0) @@ -0,0 +1,30 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hive.serde2.objectinspector.primitive; + +import java.sql.Date; + +import org.apache.hadoop.hive.serde2.io.DateWritable; +import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; + +public interface DateObjectInspector extends PrimitiveObjectInspector { + + DateWritable getPrimitiveWritableObject(Object o); + + Date getPrimitiveJavaObject(Object o); +} Index: serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java =================================================================== --- serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java (revision 1447989) +++ serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java (working copy) @@ -22,6 +22,7 @@ import java.io.DataOutput; import java.io.IOException; import java.math.BigDecimal; +import java.sql.Date; import java.sql.Timestamp; import java.util.HashMap; import java.util.Map; @@ -31,6 +32,7 @@ import org.apache.hadoop.hive.serde2.io.ByteWritable; import org.apache.hadoop.hive.serde2.io.DoubleWritable; import org.apache.hadoop.hive.serde2.io.ShortWritable; +import org.apache.hadoop.hive.serde2.io.DateWritable; import org.apache.hadoop.hive.serde2.io.TimestampWritable; import org.apache.hadoop.hive.serde2.lazy.LazyInteger; import org.apache.hadoop.hive.serde2.lazy.LazyLong; @@ -177,7 +179,9 @@ public static final PrimitiveTypeEntry shortTypeEntry = new PrimitiveTypeEntry( PrimitiveCategory.SHORT, serdeConstants.SMALLINT_TYPE_NAME, Short.TYPE, Short.class, ShortWritable.class); - + public static final PrimitiveTypeEntry dateTypeEntry = new PrimitiveTypeEntry( + PrimitiveCategory.DATE, serdeConstants.DATE_TYPE_NAME, null, + Date.class, DateWritable.class); public static final PrimitiveTypeEntry timestampTypeEntry = new PrimitiveTypeEntry( PrimitiveCategory.TIMESTAMP, serdeConstants.TIMESTAMP_TYPE_NAME, null, Timestamp.class, TimestampWritable.class); @@ -200,6 +204,7 @@ registerType(doubleTypeEntry); registerType(byteTypeEntry); registerType(shortTypeEntry); + registerType(dateTypeEntry); registerType(timestampTypeEntry); registerType(decimalTypeEntry); registerType(unknownTypeEntry); @@ -361,6 +366,10 @@ .getPrimitiveWritableObject(o2); return t1.equals(t2); } + case DATE: { + return ((DateObjectInspector) oi1).getPrimitiveWritableObject(o1) + .equals(((DateObjectInspector) oi2).getPrimitiveWritableObject(o2)); + } case TIMESTAMP: { return ((TimestampObjectInspector) oi1).getPrimitiveWritableObject(o1) .equals(((TimestampObjectInspector) oi2).getPrimitiveWritableObject(o2)); @@ -399,6 +408,8 @@ return ((DoubleObjectInspector) oi).get(o); case STRING: return Double.valueOf(((StringObjectInspector) oi).getPrimitiveJavaObject(o)); + case DATE: + return ((DateObjectInspector) oi).getPrimitiveWritableObject(o).getTimeInSeconds(); case TIMESTAMP: return ((TimestampObjectInspector) oi).getPrimitiveWritableObject(o) .getDouble(); @@ -474,6 +485,10 @@ result = s.length() != 0; } break; + case DATE: + result = (((DateObjectInspector) oi) + .getPrimitiveWritableObject(o).getTimeInSeconds() != 0); + break; case TIMESTAMP: result = (((TimestampObjectInspector) oi) .getPrimitiveWritableObject(o).getSeconds() != 0); @@ -558,6 +573,10 @@ } break; } + case DATE: + result = (int) (((DateObjectInspector) oi) + .getPrimitiveWritableObject(o).getTimeInSeconds()); + break; case TIMESTAMP: result = (int) (((TimestampObjectInspector) oi) 
.getPrimitiveWritableObject(o).getSeconds()); @@ -616,6 +635,10 @@ result = Long.parseLong(s); } break; + case DATE: + result = ((DateObjectInspector) oi).getPrimitiveWritableObject(o) + .getTimeInSeconds(); + break; case TIMESTAMP: result = ((TimestampObjectInspector) oi).getPrimitiveWritableObject(o) .getSeconds(); @@ -668,6 +691,9 @@ String s = soi.getPrimitiveJavaObject(o); result = Double.parseDouble(s); break; + case DATE: + result = ((DateObjectInspector) oi).getPrimitiveWritableObject(o).getTimeInSeconds(); + break; case TIMESTAMP: result = ((TimestampObjectInspector) oi).getPrimitiveWritableObject(o).getDouble(); break; @@ -732,6 +758,9 @@ StringObjectInspector soi = (StringObjectInspector) oi; result = soi.getPrimitiveJavaObject(o); break; + case DATE: + result = ((DateObjectInspector) oi).getPrimitiveWritableObject(o).toString(); + break; case TIMESTAMP: result = ((TimestampObjectInspector) oi).getPrimitiveWritableObject(o).toString(); break; @@ -809,6 +838,9 @@ case STRING: result = new BigDecimal(((StringObjectInspector) oi).getPrimitiveJavaObject(o)); break; + case DATE: + result = new BigDecimal(((DateObjectInspector) oi).getPrimitiveWritableObject(o).get().getTime()); + break; case TIMESTAMP: Double ts = ((TimestampObjectInspector) oi).getPrimitiveWritableObject(o) .getDouble(); @@ -824,6 +856,67 @@ return result; } + public static Date getDate(Object o, PrimitiveObjectInspector oi) { + if (o == null) { + return null; + } + + Date result = null; + switch (oi.getPrimitiveCategory()) { + case VOID: + result = null; + break; + case BOOLEAN: + result = DateWritable.timeToDate((((BooleanObjectInspector) oi).get(o))? 1 : 0); + break; + case BYTE: + result = DateWritable.timeToDate(((ByteObjectInspector) oi).get(o)); + break; + case SHORT: + result = DateWritable.timeToDate(((ShortObjectInspector) oi).get(o)); + break; + case INT: + result = DateWritable.timeToDate(((IntObjectInspector) oi).get(o)); + break; + case LONG: + result = DateWritable.timeToDate(((LongObjectInspector) oi).get(o)); + break; + case FLOAT: + result = DateWritable.timeToDate((long)((FloatObjectInspector) oi).get(o)); + break; + case DOUBLE: + result = DateWritable.timeToDate((long)((DoubleObjectInspector) oi).get(o)); + break; + case DECIMAL: + result = DateWritable.timeToDate(((BigDecimalObjectInspector) oi) + .getPrimitiveJavaObject(o).longValue()); + break; + case STRING: + StringObjectInspector soi = (StringObjectInspector) oi; + String s = soi.getPrimitiveJavaObject(o).trim(); + try { + result = Date.valueOf(s); + } catch (IllegalArgumentException e) { + result = null; + } + break; + case DATE: + result = ((DateObjectInspector) oi).getPrimitiveWritableObject(o).get(); + break; + case TIMESTAMP: + result = DateWritable.timeToDate(((TimestampObjectInspector) oi).getPrimitiveWritableObject(o).getSeconds()); + break; + case BINARY: + throw new RuntimeException("Cannot convert to Date from: " + + oi.getTypeName()); + default: + throw new RuntimeException("Hive 2 Internal error: unknown type: " + + oi.getTypeName()); + } + + return result; + } + public static Timestamp getTimestamp(Object o, PrimitiveObjectInspector oi) { if (o == null) { return null; @@ -876,6 +969,9 @@ result = null; } break; + case DATE: + result = new Timestamp(((DateObjectInspector) oi).getPrimitiveWritableObject(o).get().getTime()); + break; case TIMESTAMP: result = ((TimestampObjectInspector) oi).getPrimitiveWritableObject(o).getTimestamp(); break; Index: 
serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorFactory.java =================================================================== --- serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorFactory.java (revision 1447989) +++ serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorFactory.java (working copy) @@ -24,6 +24,7 @@ import org.apache.hadoop.hive.serde2.io.ByteWritable; import org.apache.hadoop.hive.serde2.io.DoubleWritable; import org.apache.hadoop.hive.serde2.io.ShortWritable; +import org.apache.hadoop.hive.serde2.io.DateWritable; import org.apache.hadoop.hive.serde2.io.TimestampWritable; import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; @@ -66,6 +67,8 @@ new JavaStringObjectInspector(); public static final JavaVoidObjectInspector javaVoidObjectInspector = new JavaVoidObjectInspector(); + public static final JavaDateObjectInspector javaDateObjectInspector = + new JavaDateObjectInspector(); public static final JavaTimestampObjectInspector javaTimestampObjectInspector = new JavaTimestampObjectInspector(); public static final JavaBinaryObjectInspector javaByteArrayObjectInspector = @@ -91,6 +94,8 @@ new WritableStringObjectInspector(); public static final WritableVoidObjectInspector writableVoidObjectInspector = new WritableVoidObjectInspector(); + public static final WritableDateObjectInspector writableDateObjectInspector = + new WritableDateObjectInspector(); public static final WritableTimestampObjectInspector writableTimestampObjectInspector = new WritableTimestampObjectInspector(); public static final WritableBinaryObjectInspector writableBinaryObjectInspector = @@ -119,6 +124,8 @@ writableStringObjectInspector); cachedPrimitiveWritableInspectorCache.put(PrimitiveCategory.VOID, writableVoidObjectInspector); + cachedPrimitiveWritableInspectorCache.put(PrimitiveCategory.DATE, + writableDateObjectInspector); cachedPrimitiveWritableInspectorCache.put(PrimitiveCategory.TIMESTAMP, writableTimestampObjectInspector); cachedPrimitiveWritableInspectorCache.put(PrimitiveCategory.BINARY, @@ -148,6 +155,8 @@ javaStringObjectInspector); cachedPrimitiveJavaInspectorCache.put(PrimitiveCategory.VOID, javaVoidObjectInspector); + cachedPrimitiveJavaInspectorCache.put(PrimitiveCategory.DATE, + javaDateObjectInspector); cachedPrimitiveJavaInspectorCache.put(PrimitiveCategory.TIMESTAMP, javaTimestampObjectInspector); cachedPrimitiveJavaInspectorCache.put(PrimitiveCategory.BINARY, @@ -198,6 +207,8 @@ return new WritableConstantDoubleObjectInspector((DoubleWritable)value); case STRING: return new WritableConstantStringObjectInspector((Text)value); + case DATE: + return new WritableConstantDateObjectInspector((DateWritable)value); case TIMESTAMP: return new WritableConstantTimestampObjectInspector((TimestampWritable)value); case DECIMAL: Index: serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorConverter.java =================================================================== --- serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorConverter.java (revision 1447989) +++ serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorConverter.java (working copy) @@ -19,6 +19,7 @@ package org.apache.hadoop.hive.serde2.objectinspector.primitive; import 
Index: serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorConverter.java
===================================================================
--- serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorConverter.java	(revision 1447989)
+++ serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorConverter.java	(working copy)
@@ -19,6 +19,7 @@
 package org.apache.hadoop.hive.serde2.objectinspector.primitive;
 
 import java.math.BigDecimal;
+import java.sql.Date;
 import java.sql.Timestamp;
 
 import org.apache.hadoop.hive.serde2.ByteStream;
@@ -237,6 +238,27 @@
     }
   }
 
+  public static class DateConverter implements Converter {
+    PrimitiveObjectInspector inputOI;
+    SettableDateObjectInspector outputOI;
+    Object r;
+
+    public DateConverter(PrimitiveObjectInspector inputOI,
+        SettableDateObjectInspector outputOI) {
+      this.inputOI = inputOI;
+      this.outputOI = outputOI;
+      r = outputOI.create(new Date(0));
+    }
+
+    public Object convert(Object input) {
+      if (input == null) {
+        return null;
+      }
+      return outputOI.set(r, PrimitiveObjectInspectorUtils.getDate(input,
+          inputOI));
+    }
+  }
+
   public static class TimestampConverter implements Converter {
     PrimitiveObjectInspector inputOI;
     SettableTimestampObjectInspector outputOI;
@@ -364,6 +386,9 @@
         t.set(((StringObjectInspector) inputOI).getPrimitiveJavaObject(input));
       }
       return t;
+    case DATE:
+      t.set(((DateObjectInspector) inputOI).getPrimitiveWritableObject(input).toString());
+      return t;
     case TIMESTAMP:
       t.set(((TimestampObjectInspector) inputOI)
           .getPrimitiveWritableObject(input).toString());
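DateConverter follows the same pattern as the other converters in this file: one settable output object is allocated up front and mutated on every convert() call, so callers must copy the result if they need to keep it across calls. A hypothetical usage sketch (illustrative only; the inspector singletons are the factory fields added in this patch):

    import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter.DateConverter;
    import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

    public class DateConverterSketch {
      public static void main(String[] args) {
        // String -> DateWritable conversion, reusing one output object.
        DateConverter c = new DateConverter(
            PrimitiveObjectInspectorFactory.javaStringObjectInspector,
            PrimitiveObjectInspectorFactory.writableDateObjectInspector);

        Object first = c.convert("2011-05-06");
        Object second = c.convert("2012-01-01");

        // Same DateWritable instance both times; it now holds 2012-01-01.
        System.out.println(first + " sameObject=" + (first == second));
      }
    }
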
Index: serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaDateObjectInspector.java
===================================================================
--- serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaDateObjectInspector.java	(revision 0)
+++ serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaDateObjectInspector.java	(revision 0)
@@ -0,0 +1,71 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.serde2.objectinspector.primitive;
+
+import java.sql.Date;
+
+import org.apache.hadoop.hive.serde2.io.DateWritable;
+
+public class JavaDateObjectInspector
+    extends AbstractPrimitiveJavaObjectInspector
+    implements SettableDateObjectInspector {
+
+  protected JavaDateObjectInspector() {
+    super(PrimitiveObjectInspectorUtils.dateTypeEntry);
+  }
+
+  public DateWritable getPrimitiveWritableObject(Object o) {
+    return o == null ? null : new DateWritable((Date) o);
+  }
+
+  @Override
+  public Date getPrimitiveJavaObject(Object o) {
+    return o == null ? null : (Date) o;
+  }
+
+  public Date get(Object o) {
+    return (Date) o;
+  }
+
+  public Object set(Object o, Date value) {
+    ((Date) o).setTime(value.getTime());
+    return o;
+  }
+
+/*
+  public Object set(Object o, byte[] bytes, int offset) {
+    TimestampWritable.setTimestamp((Timestamp) o, bytes, offset);
+    return o;
+  }
+*/
+
+  public Object set(Object o, DateWritable d) {
+    ((Date) o).setTime(d.get().getTime());
+    return o;
+  }
+
+  public Object create(Date value) {
+    return new Date(value.getTime());
+  }
+
+/*
+  public Object create(byte[] bytes, int offset) {
+    return TimestampWritable.createTimestamp(bytes, offset);
+  }
+*/
+}
Index: serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableConstantDateObjectInspector.java
===================================================================
--- serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableConstantDateObjectInspector.java	(revision 0)
+++ serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableConstantDateObjectInspector.java	(revision 0)
@@ -0,0 +1,43 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.serde2.objectinspector.primitive;
+
+import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
+
+import org.apache.hadoop.hive.serde2.io.DateWritable;
+
+/**
+ * A WritableConstantDateObjectInspector is a WritableDateObjectInspector
+ * that implements ConstantObjectInspector.
+ */
+public class WritableConstantDateObjectInspector extends
+    WritableDateObjectInspector implements
+    ConstantObjectInspector {
+
+  private DateWritable value;
+
+  WritableConstantDateObjectInspector(DateWritable value) {
+    super();
+    this.value = value;
+  }
+
+  @Override
+  public DateWritable getWritableConstantValue() {
+    return value;
+  }
+}
Index: serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableDateObjectInspector.java
===================================================================
--- serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableDateObjectInspector.java	(revision 0)
+++ serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableDateObjectInspector.java	(revision 0)
@@ -0,0 +1,71 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.serde2.objectinspector.primitive;
+
+import java.sql.Date;
+
+import org.apache.hadoop.hive.serde2.io.DateWritable;
+
+public class WritableDateObjectInspector extends
+    AbstractPrimitiveWritableObjectInspector implements
+    SettableDateObjectInspector {
+
+  public WritableDateObjectInspector() {
+    super(PrimitiveObjectInspectorUtils.dateTypeEntry);
+  }
+
+  @Override
+  public DateWritable getPrimitiveWritableObject(Object o) {
+    return o == null ? null : (DateWritable) o;
+  }
+
+  public Date getPrimitiveJavaObject(Object o) {
+    return o == null ? null : ((DateWritable) o).get();
+  }
+
+  public Object copyObject(Object o) {
+    return o == null ? null : new DateWritable((DateWritable) o);
+  }
+
+/*
+  public Object set(Object o, byte[] bytes, int offset) {
+    ((DateWritable) o).set(bytes, offset);
+    return o;
+  }
+*/
+
+  public Object set(Object o, Date d) {
+    ((DateWritable) o).set(d);
+    return o;
+  }
+
+  public Object set(Object o, DateWritable d) {
+    ((DateWritable) o).set(d);
+    return o;
+  }
+
+/*
+  public Object create(byte[] bytes, int offset) {
+    return new DateWritable(bytes, offset);
+  }
+*/
+
+  public Object create(Date d) {
+    return new DateWritable(d);
+  }
+}
Index: serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoFactory.java
===================================================================
--- serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoFactory.java	(revision 1447989)
+++ serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoFactory.java	(working copy)
@@ -62,6 +62,7 @@
   public static final TypeInfo doubleTypeInfo = getPrimitiveTypeInfo(serdeConstants.DOUBLE_TYPE_NAME);
   public static final TypeInfo byteTypeInfo = getPrimitiveTypeInfo(serdeConstants.TINYINT_TYPE_NAME);
   public static final TypeInfo shortTypeInfo = getPrimitiveTypeInfo(serdeConstants.SMALLINT_TYPE_NAME);
+  public static final TypeInfo dateTypeInfo = getPrimitiveTypeInfo(serdeConstants.DATE_TYPE_NAME);
   public static final TypeInfo timestampTypeInfo = getPrimitiveTypeInfo(serdeConstants.TIMESTAMP_TYPE_NAME);
   public static final TypeInfo binaryTypeInfo = getPrimitiveTypeInfo(serdeConstants.BINARY_TYPE_NAME);
   public static final TypeInfo decimalTypeInfo = getPrimitiveTypeInfo(serdeConstants.DECIMAL_TYPE_NAME);
Index: serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java
===================================================================
--- serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java	(revision 1447989)
+++ serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java	(working copy)
@@ -44,6 +44,7 @@
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.DateObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.Text;
@@ -266,6 +267,13 @@
         sb.append('"');
         break;
       }
+      case DATE: {
+        sb.append('"');
+        sb.append(((DateObjectInspector) poi)
+            .getPrimitiveWritableObject(o));
+        sb.append('"');
+        break;
+      }
       case TIMESTAMP: {
         sb.append('"');
         sb.append(((TimestampObjectInspector) poi)
Index: serde/src/java/org/apache/hadoop/hive/serde2/io/DateWritable.java
===================================================================
--- serde/src/java/org/apache/hadoop/hive/serde2/io/DateWritable.java	(revision 0)
+++ serde/src/java/org/apache/hadoop/hive/serde2/io/DateWritable.java	(revision 0)
@@ -0,0 +1,122 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.serde2.io;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+import java.sql.Date;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.io.WritableComparable;
+import org.apache.hadoop.io.WritableUtils;
+
+/**
+ * DateWritable
+ * Writable equivalent of java.sql.Date.
+ *
+ * Dates are of the format
+ *    YYYY-MM-DD
+ *
+ */
+public class DateWritable implements WritableComparable<DateWritable> {
+
+  private static final Log LOG = LogFactory.getLog(DateWritable.class);
+
+  private Date date = new Date(0);
+
+  /* Constructors */
+  public DateWritable() {
+  }
+
+  public DateWritable(DateWritable d) {
+    set(d);
+  }
+
+  public DateWritable(Date d) {
+    set(d);
+  }
+
+  public void set(Date d) {
+    if (d == null) {
+      date.setTime(0);
+      return;
+    }
+    // Copy the value rather than aliasing the caller's Date object.
+    date.setTime(d.getTime());
+  }
+
+  public void set(DateWritable d) {
+    set(d.get());
+  }
+
+  public Date get() {
+    return date;
+  }
+
+  /**
+   *
+   * @return time in seconds corresponding to this DateWritable
+   */
+  public long getTimeInSeconds() {
+    return date.getTime() / 1000;
+  }
+
+  public static Date timeToDate(long l) {
+    return new Date(l * 1000);
+  }
+
+  @Override
+  public void readFields(DataInput in) throws IOException {
+    date.setTime(WritableUtils.readVLong(in) * 1000);
+  }
+
+  @Override
+  public void write(DataOutput out) throws IOException {
+    WritableUtils.writeVLong(out, getTimeInSeconds());
+  }
+
+  @Override
+  public int compareTo(DateWritable d) {
+    long diff = date.getTime() - d.get().getTime();
+    if (diff > 0) {
+      return 1;
+    } else if (diff == 0) {
+      return 0;
+    } else {
+      return -1;
+    }
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    if (!(o instanceof DateWritable)) {
+      return false;
+    }
+    return compareTo((DateWritable) o) == 0;
+  }
+
+  @Override
+  public String toString() {
+    return date.toString();
+  }
+
+  @Override
+  public int hashCode() {
+    return date.hashCode();
+  }
+}
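DateWritable serializes with one-second granularity: write() emits a single VLong of seconds since the epoch, and readFields() multiplies back by 1000, so any sub-second component of the wrapped java.sql.Date is dropped on a round trip. A small round-trip sketch, illustrative only:

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;
    import java.sql.Date;
    import org.apache.hadoop.hive.serde2.io.DateWritable;

    public class DateWritableRoundTrip {
      public static void main(String[] args) throws IOException {
        DateWritable in = new DateWritable(Date.valueOf("2013-01-01"));

        // Serialize: a single VLong holding seconds since the epoch.
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        in.write(new DataOutputStream(bos));

        // Deserialize into a fresh writable and compare.
        DateWritable out = new DateWritable();
        out.readFields(new DataInputStream(new ByteArrayInputStream(bos.toByteArray())));
        System.out.println(out + " equal=" + in.equals(out)); // 2013-01-01 equal=true
      }
    }
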
Index: jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java
===================================================================
--- jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java	(revision 1447989)
+++ jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java	(working copy)
@@ -149,7 +149,8 @@
         + " c15 struct<r:int,s:struct<a:int,b:string>>,"
         + " c16 array<struct<m:map<string,string>,n:int>>,"
         + " c17 timestamp, "
-        + " c18 decimal) comment'" + dataTypeTableComment
+        + " c18 decimal,"
+        + " c19 date) comment'" + dataTypeTableComment
         +"' partitioned by (dt STRING)");
     assertFalse(res.next());
@@ -383,6 +384,8 @@
     assertEquals(null, res.getString(17));
     assertEquals(null, res.getTimestamp(17));
     assertEquals(null, res.getBigDecimal(18));
+    assertEquals(null, res.getString(19));
+    assertEquals(null, res.getDate(19));
 
     // row 3
     assertTrue(res.next());
@@ -405,6 +408,8 @@
     assertEquals("2012-04-22 09:00:00.123456789", res.getString(17));
     assertEquals("2012-04-22 09:00:00.123456789", res.getTimestamp(17).toString());
     assertEquals("123456789.0123456", res.getBigDecimal(18).toString());
+    assertEquals("2013-01-01", res.getString(19));
+    assertEquals("2013-01-01", res.getDate(19).toString());
 
     // test getBoolean rules on non-boolean columns
     assertEquals(true, res.getBoolean(1));
@@ -806,13 +811,13 @@
     ResultSet res = stmt.executeQuery(
         "select c1, c2, c3, c4, c5 as a, c6, c7, c8, c9, c10, c11, c12, " +
-        "c1*2, sentences(null, null, null) as b, c17, c18 from " + dataTypeTableName + " limit 1");
+        "c1*2, sentences(null, null, null) as b, c17, c18, c19 from " + dataTypeTableName + " limit 1");
     ResultSetMetaData meta = res.getMetaData();
 
     ResultSet colRS = con.getMetaData().getColumns(null, null,
         dataTypeTableName.toLowerCase(), null);
 
-    assertEquals(16, meta.getColumnCount());
+    assertEquals(17, meta.getColumnCount());
 
     assertTrue(colRS.next());
 
@@ -1022,6 +1027,13 @@
     assertEquals(Integer.MAX_VALUE, meta.getPrecision(16));
     assertEquals(Integer.MAX_VALUE, meta.getScale(16));
 
+    assertEquals("c19", meta.getColumnName(17));
+    assertEquals(Types.DATE, meta.getColumnType(17));
+    assertEquals("date", meta.getColumnTypeName(17));
+    assertEquals(10, meta.getColumnDisplaySize(17));
+    assertEquals(10, meta.getPrecision(17));
+    assertEquals(0, meta.getScale(17));
+
     for (int i = 1; i <= meta.getColumnCount(); i++) {
       assertFalse(meta.isAutoIncrement(i));
       assertFalse(meta.isCurrency(i));
Index: jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveResultSetMetaData.java
===================================================================
--- jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveResultSetMetaData.java	(revision 1447989)
+++ jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveResultSetMetaData.java	(working copy)
@@ -110,6 +110,8 @@
       return serdeConstants.INT_TYPE_NAME;
     } else if ("bigint".equalsIgnoreCase(type)) {
       return serdeConstants.BIGINT_TYPE_NAME;
+    } else if ("date".equalsIgnoreCase(type)) {
+      return serdeConstants.DATE_TYPE_NAME;
     } else if ("timestamp".equalsIgnoreCase(type)) {
       return serdeConstants.TIMESTAMP_TYPE_NAME;
     } else if ("decimal".equalsIgnoreCase(type)) {
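On the JDBC side, the driver now maps the type name in both directions: "date" resolves to java.sql.Types.DATE (the Utils.java hunk below), and column metadata reports a 10-character display size with zero scale, matching the YYYY-MM-DD format (the JdbcColumn.java hunks below). A hypothetical client-side check, with the connection URL and table name as placeholders:

    import java.sql.*;

    public class DateMetadataSketch {
      public static void main(String[] args) throws Exception {
        Class.forName("org.apache.hadoop.hive.jdbc.HiveDriver");
        Connection con = DriverManager.getConnection(
            "jdbc:hive://localhost:10000/default", "", "");

        ResultSet rs = con.createStatement()
            .executeQuery("select d from some_date_table limit 1");
        ResultSetMetaData meta = rs.getMetaData();

        // Expected per this patch: DATE / "date" / display size 10 / scale 0.
        System.out.println(meta.getColumnType(1) == Types.DATE);
        System.out.println(meta.getColumnTypeName(1));    // "date"
        System.out.println(meta.getColumnDisplaySize(1)); // 10
        System.out.println(meta.getScale(1));             // 0
      }
    }
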
Index: jdbc/src/java/org/apache/hadoop/hive/jdbc/JdbcColumn.java
===================================================================
--- jdbc/src/java/org/apache/hadoop/hive/jdbc/JdbcColumn.java	(revision 1447989)
+++ jdbc/src/java/org/apache/hadoop/hive/jdbc/JdbcColumn.java	(working copy)
@@ -74,6 +74,7 @@
     case Types.INTEGER:
     case Types.BIGINT:
       return columnPrecision(columnType) + 1; // allow +/-
+    case Types.DATE:
     case Types.TIMESTAMP:
       return columnPrecision(columnType);
     // see http://download.oracle.com/javase/6/docs/api/constant-values.html#java.lang.Float.MAX_EXPONENT
@@ -108,6 +109,8 @@
       return 7;
     case Types.DOUBLE:
       return 15;
+    case Types.DATE:
+      return 10;
     case Types.TIMESTAMP:
       return 29;
     case Types.DECIMAL:
@@ -126,6 +129,7 @@
     case Types.SMALLINT:
     case Types.INTEGER:
     case Types.BIGINT:
+    case Types.DATE:
       return 0;
     case Types.FLOAT:
       return 7;
Index: jdbc/src/java/org/apache/hadoop/hive/jdbc/Utils.java
===================================================================
--- jdbc/src/java/org/apache/hadoop/hive/jdbc/Utils.java	(revision 1447989)
+++ jdbc/src/java/org/apache/hadoop/hive/jdbc/Utils.java	(working copy)
@@ -46,6 +46,8 @@
       return Types.INTEGER;
     } else if ("bigint".equalsIgnoreCase(type)) {
       return Types.BIGINT;
+    } else if ("date".equalsIgnoreCase(type)) {
+      return Types.DATE;
    } else if ("timestamp".equalsIgnoreCase(type)) {
       return Types.TIMESTAMP;
     } else if ("decimal".equalsIgnoreCase(type)) {
Index: jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveBaseResultSet.java
===================================================================
--- jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveBaseResultSet.java	(revision 1447989)
+++ jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveBaseResultSet.java	(working copy)
@@ -214,12 +214,20 @@
       return null;
     }
 
+    if (obj instanceof Date) {
+      return (Date) obj;
+    }
+
     try {
-      return Date.valueOf((String) obj);
+      if (obj instanceof String) {
+        return Date.valueOf((String) obj);
+      }
     } catch (Exception e) {
       throw new SQLException("Cannot convert column " + columnIndex
           + " to date: " + e.toString());
     }
+
+    throw new SQLException("Illegal conversion");
   }
 
   public Date getDate(String columnName) throws SQLException {
Index: jdbc/src/java/org/apache/hadoop/hive/jdbc/HivePreparedStatement.java
===================================================================
--- jdbc/src/java/org/apache/hadoop/hive/jdbc/HivePreparedStatement.java	(revision 1447989)
+++ jdbc/src/java/org/apache/hadoop/hive/jdbc/HivePreparedStatement.java	(working copy)
@@ -495,8 +495,7 @@
    */
 
   public void setDate(int parameterIndex, Date x) throws SQLException {
-    // TODO Auto-generated method stub
-    throw new SQLException("Method not supported");
+    this.parameters.put(parameterIndex, x.toString());
   }
 
   /*
Index: data/files/datatypes.txt
===================================================================
--- data/files/datatypes.txt	(revision 1447989)
+++ data/files/datatypes.txt	(working copy)
@@ -1,3 +1,3 @@
-\N\N\N\N\N\N\N\N\N\N\N\N\N\N\N\N\N\N
--1false-1.1\N\N\N-1-1-1.0-1\N\N\N\N
-1true1.11121x2ykva92.2111.01abcd1111213142212212x1abcd22012-04-22 09:00:00.123456789123456789.0123456
+\N\N\N\N\N\N\N\N\N\N\N\N\N\N\N\N\N\N\N
+-1false-1.1\N\N\N-1-1-1.0-1\N\N\N\N\N
+1true1.11121x2ykva92.2111.01abcd1111213142212212x1abcd22012-04-22 09:00:00.123456789123456789.01234562013-01-01
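With setDate() now wired to the parameter map, a java.sql.Date bound through this driver is stored as its YYYY-MM-DD string form (whether it is additionally quoted on substitution depends on the driver's parameter-substitution logic, which this patch does not touch), and HiveBaseResultSet.getDate() accepts either a Date object or such a string on the way back out. A hypothetical end-to-end use, with the URL and table name as placeholders:

    import java.sql.*;

    public class PreparedDateSketch {
      public static void main(String[] args) throws Exception {
        Class.forName("org.apache.hadoop.hive.jdbc.HiveDriver");
        Connection con = DriverManager.getConnection(
            "jdbc:hive://localhost:10000/default", "", "");

        // setDate() stores x.toString(), i.e. "2013-01-01", as the parameter value.
        PreparedStatement ps = con.prepareStatement(
            "select d from some_date_table where d = ?");
        ps.setDate(1, Date.valueOf("2013-01-01"));

        ResultSet rs = ps.executeQuery();
        while (rs.next()) {
          // getDate() handles both Date objects and "YYYY-MM-DD" strings.
          System.out.println(rs.getDate(1));
        }
      }
    }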