diff --git ql/src/test/queries/clientpositive/decimal_udf.q ql/src/test/queries/clientpositive/decimal_udf.q index 06d1785..c72f2f9 100644 --- ql/src/test/queries/clientpositive/decimal_udf.q +++ ql/src/test/queries/clientpositive/decimal_udf.q @@ -7,6 +7,8 @@ STORED AS TEXTFILE; LOAD DATA LOCAL INPATH '../data/files/kv7.txt' INTO TABLE DECIMAL_UDF; +set hive.fetch.task.conversion=more; + -- addition EXPLAIN SELECT key + key FROM DECIMAL_UDF; SELECT key + key FROM DECIMAL_UDF; diff --git ql/src/test/queries/clientpositive/timestamp_1.q ql/src/test/queries/clientpositive/timestamp_1.q index f2c3b59..4f8d125 100644 --- ql/src/test/queries/clientpositive/timestamp_1.q +++ ql/src/test/queries/clientpositive/timestamp_1.q @@ -3,8 +3,10 @@ drop table timestamp_1; create table timestamp_1 (t timestamp); alter table timestamp_1 set serde 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'; +set hive.fetch.task.conversion=more; + insert overwrite table timestamp_1 - select cast('2011-01-01 01:01:01' as timestamp) from src limit 1; + select cast('2011-01-01 01:01:01' as timestamp) from src tablesample (1 rows); select cast(t as boolean) from timestamp_1 limit 1; select cast(t as tinyint) from timestamp_1 limit 1; select cast(t as smallint) from timestamp_1 limit 1; @@ -15,7 +17,7 @@ select cast(t as double) from timestamp_1 limit 1; select cast(t as string) from timestamp_1 limit 1; insert overwrite table timestamp_1 - select '2011-01-01 01:01:01' from src limit 1; + select '2011-01-01 01:01:01' from src tablesample (1 rows); select cast(t as boolean) from timestamp_1 limit 1; select cast(t as tinyint) from timestamp_1 limit 1; select cast(t as smallint) from timestamp_1 limit 1; @@ -26,7 +28,7 @@ select cast(t as double) from timestamp_1 limit 1; select cast(t as string) from timestamp_1 limit 1; insert overwrite table timestamp_1 - select '2011-01-01 01:01:01.1' from src limit 1; + select '2011-01-01 01:01:01.1' from src tablesample (1 rows); select cast(t as boolean) from timestamp_1 limit 1; select cast(t as tinyint) from timestamp_1 limit 1; select cast(t as smallint) from timestamp_1 limit 1; @@ -37,7 +39,7 @@ select cast(t as double) from timestamp_1 limit 1; select cast(t as string) from timestamp_1 limit 1; insert overwrite table timestamp_1 - select '2011-01-01 01:01:01.0001' from src limit 1; + select '2011-01-01 01:01:01.0001' from src tablesample (1 rows); select cast(t as boolean) from timestamp_1 limit 1; select cast(t as tinyint) from timestamp_1 limit 1; select cast(t as smallint) from timestamp_1 limit 1; @@ -48,7 +50,7 @@ select cast(t as double) from timestamp_1 limit 1; select cast(t as string) from timestamp_1 limit 1; insert overwrite table timestamp_1 - select '2011-01-01 01:01:01.000100000' from src limit 1; + select '2011-01-01 01:01:01.000100000' from src tablesample (1 rows); select cast(t as boolean) from timestamp_1 limit 1; select cast(t as tinyint) from timestamp_1 limit 1; select cast(t as smallint) from timestamp_1 limit 1; @@ -59,7 +61,7 @@ select cast(t as double) from timestamp_1 limit 1; select cast(t as string) from timestamp_1 limit 1; insert overwrite table timestamp_1 - select '2011-01-01 01:01:01.001000011' from src limit 1; + select '2011-01-01 01:01:01.001000011' from src tablesample (1 rows); select cast(t as boolean) from timestamp_1 limit 1; select cast(t as tinyint) from timestamp_1 limit 1; select cast(t as smallint) from timestamp_1 limit 1; diff --git ql/src/test/queries/clientpositive/timestamp_2.q ql/src/test/queries/clientpositive/timestamp_2.q 
index b93208f..07fcdbb 100644 --- ql/src/test/queries/clientpositive/timestamp_2.q +++ ql/src/test/queries/clientpositive/timestamp_2.q @@ -3,8 +3,10 @@ drop table timestamp_2; create table timestamp_2 (t timestamp); alter table timestamp_2 set serde 'org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe'; +set hive.fetch.task.conversion=more; + insert overwrite table timestamp_2 - select cast('2011-01-01 01:01:01' as timestamp) from src limit 1; + select cast('2011-01-01 01:01:01' as timestamp) from src tablesample (1 rows); select cast(t as boolean) from timestamp_2 limit 1; select cast(t as tinyint) from timestamp_2 limit 1; select cast(t as smallint) from timestamp_2 limit 1; @@ -15,7 +17,7 @@ select cast(t as double) from timestamp_2 limit 1; select cast(t as string) from timestamp_2 limit 1; insert overwrite table timestamp_2 - select '2011-01-01 01:01:01' from src limit 1; + select '2011-01-01 01:01:01' from src tablesample (1 rows); select cast(t as boolean) from timestamp_2 limit 1; select cast(t as tinyint) from timestamp_2 limit 1; select cast(t as smallint) from timestamp_2 limit 1; @@ -26,7 +28,7 @@ select cast(t as double) from timestamp_2 limit 1; select cast(t as string) from timestamp_2 limit 1; insert overwrite table timestamp_2 - select '2011-01-01 01:01:01.1' from src limit 1; + select '2011-01-01 01:01:01.1' from src tablesample (1 rows); select cast(t as boolean) from timestamp_2 limit 1; select cast(t as tinyint) from timestamp_2 limit 1; select cast(t as smallint) from timestamp_2 limit 1; @@ -37,7 +39,7 @@ select cast(t as double) from timestamp_2 limit 1; select cast(t as string) from timestamp_2 limit 1; insert overwrite table timestamp_2 - select '2011-01-01 01:01:01.0001' from src limit 1; + select '2011-01-01 01:01:01.0001' from src tablesample (1 rows); select cast(t as boolean) from timestamp_2 limit 1; select cast(t as tinyint) from timestamp_2 limit 1; select cast(t as smallint) from timestamp_2 limit 1; @@ -48,7 +50,7 @@ select cast(t as double) from timestamp_2 limit 1; select cast(t as string) from timestamp_2 limit 1; insert overwrite table timestamp_2 - select '2011-01-01 01:01:01.000100000' from src limit 1; + select '2011-01-01 01:01:01.000100000' from src tablesample (1 rows); select cast(t as boolean) from timestamp_2 limit 1; select cast(t as tinyint) from timestamp_2 limit 1; select cast(t as smallint) from timestamp_2 limit 1; @@ -59,7 +61,7 @@ select cast(t as double) from timestamp_2 limit 1; select cast(t as string) from timestamp_2 limit 1; insert overwrite table timestamp_2 - select '2011-01-01 01:01:01.001000011' from src limit 1; + select '2011-01-01 01:01:01.001000011' from src tablesample (1 rows); select cast(t as boolean) from timestamp_2 limit 1; select cast(t as tinyint) from timestamp_2 limit 1; select cast(t as smallint) from timestamp_2 limit 1; diff --git ql/src/test/queries/clientpositive/timestamp_3.q ql/src/test/queries/clientpositive/timestamp_3.q index cda724f..41d2818 100644 --- ql/src/test/queries/clientpositive/timestamp_3.q +++ ql/src/test/queries/clientpositive/timestamp_3.q @@ -3,8 +3,10 @@ drop table timestamp_3; create table timestamp_3 (t timestamp); alter table timestamp_3 set serde 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'; +set hive.fetch.task.conversion=more; + insert overwrite table timestamp_3 - select cast(cast('1.3041352164485E9' as double) as timestamp) from src limit 1; + select cast(cast('1.3041352164485E9' as double) as timestamp) from src tablesample (1 rows); select cast(t as boolean) 
from timestamp_3 limit 1; select cast(t as tinyint) from timestamp_3 limit 1; select cast(t as smallint) from timestamp_3 limit 1; diff --git ql/src/test/queries/clientpositive/timestamp_lazy.q ql/src/test/queries/clientpositive/timestamp_lazy.q index 7a10052..e9a0cfa 100644 --- ql/src/test/queries/clientpositive/timestamp_lazy.q +++ ql/src/test/queries/clientpositive/timestamp_lazy.q @@ -1,6 +1,6 @@ drop table timestamp_lazy; create table timestamp_lazy (t timestamp, key string, value string); -insert overwrite table timestamp_lazy select cast('2011-01-01 01:01:01' as timestamp), key, value from src limit 5; +insert overwrite table timestamp_lazy select cast('2011-01-01 01:01:01' as timestamp), key, value from src tablesample (5 rows); select t,key,value from timestamp_lazy ORDER BY key ASC, value ASC; select t,key,value from timestamp_lazy distribute by t sort by key ASC, value ASC; diff --git ql/src/test/queries/clientpositive/timestamp_udf.q ql/src/test/queries/clientpositive/timestamp_udf.q index 2620ace..40d8402 100644 --- ql/src/test/queries/clientpositive/timestamp_udf.q +++ ql/src/test/queries/clientpositive/timestamp_udf.q @@ -3,11 +3,13 @@ drop table timestamp_udf_string; create table timestamp_udf (t timestamp); create table timestamp_udf_string (t string); -from src +from (select * from src tablesample (1 rows)) s insert overwrite table timestamp_udf - select '2011-05-06 07:08:09.1234567' limit 1 + select '2011-05-06 07:08:09.1234567' insert overwrite table timestamp_udf_string - select '2011-05-06 07:08:09.1234567' limit 1; + select '2011-05-06 07:08:09.1234567'; + +set hive.fetch.task.conversion=more; -- Test UDFs with Timestamp input select unix_timestamp(t), year(t), month(t), day(t), dayofmonth(t), diff --git ql/src/test/queries/clientpositive/udf_get_json_object.q ql/src/test/queries/clientpositive/udf_get_json_object.q index 464f2df..5cfa55f 100644 --- ql/src/test/queries/clientpositive/udf_get_json_object.q +++ ql/src/test/queries/clientpositive/udf_get_json_object.q @@ -5,6 +5,8 @@ CREATE TABLE dest1(c1 STRING) STORED AS TEXTFILE; FROM src INSERT OVERWRITE TABLE dest1 SELECT ' abc ' WHERE src.key = 86; +set hive.fetch.task.conversion=more; + EXPLAIN SELECT get_json_object(src_json.json, '$.owner') FROM src_json; @@ -33,8 +35,8 @@ SELECT get_json_object(src_json.json, '$.fb:testid') FROM src_json; CREATE TABLE dest2(c1 STRING) STORED AS RCFILE; -INSERT OVERWRITE TABLE dest2 SELECT '{"a":"b\nc"}' FROM src LIMIT 1; +INSERT OVERWRITE TABLE dest2 SELECT '{"a":"b\nc"}' FROM src tablesample (1 rows); SELECT * FROM dest2; -SELECT get_json_object(c1, '$.a') FROM dest2; \ No newline at end of file +SELECT get_json_object(c1, '$.a') FROM dest2; diff --git ql/src/test/queries/clientpositive/udf_printf.q ql/src/test/queries/clientpositive/udf_printf.q index 99e89cc..ebc0f99 100644 --- ql/src/test/queries/clientpositive/udf_printf.q +++ ql/src/test/queries/clientpositive/udf_printf.q @@ -4,21 +4,23 @@ use default; DESCRIBE FUNCTION printf; DESCRIBE FUNCTION EXTENDED printf; +set hive.fetch.task.conversion=more; + EXPLAIN -SELECT printf("Hello World %d %s", 100, "days") FROM src LIMIT 1; +SELECT printf("Hello World %d %s", 100, "days") FROM src tablesample (1 rows); -- Test Primitive Types -SELECT printf("Hello World %d %s", 100, "days") FROM src LIMIT 1; -SELECT printf("All Type Test: %b, %c, %d, %e, %+10.4f, %g, %h, %s, %a", false, 65, 15000, 12.3400, 27183.240051, 2300.41, 50, "corret", 256.125) FROM src LIMIT 1; +SELECT printf("Hello World %d %s", 100, "days") FROM src 
tablesample (1 rows); +SELECT printf("All Type Test: %b, %c, %d, %e, %+10.4f, %g, %h, %s, %a", false, 65, 15000, 12.3400, 27183.240051, 2300.41, 50, "corret", 256.125) FROM src tablesample (1 rows); -- Test NULL Values -SELECT printf("Color %s, String Null: %s, number1 %d, number2 %05d, Integer Null: %d, hex %#x, float %5.2f Double Null: %f\n", "red", NULL, 123456, 89, NULL, 255, 3.14159, NULL) FROM src LIMIT 1; +SELECT printf("Color %s, String Null: %s, number1 %d, number2 %05d, Integer Null: %d, hex %#x, float %5.2f Double Null: %f\n", "red", NULL, 123456, 89, NULL, 255, 3.14159, NULL) FROM src tablesample (1 rows); -- Test Timestamp create table timestamp_udf (t timestamp); -from src +from (select * from src tablesample (1 rows)) s insert overwrite table timestamp_udf - select '2011-05-06 07:08:09.1234567' limit 1; + select '2011-05-06 07:08:09.1234567'; select printf("timestamp: %s", t) from timestamp_udf; drop table timestamp_udf; diff --git ql/src/test/queries/clientpositive/udtf_json_tuple.q ql/src/test/queries/clientpositive/udtf_json_tuple.q index 712d959..93d829d 100644 --- ql/src/test/queries/clientpositive/udtf_json_tuple.q +++ ql/src/test/queries/clientpositive/udtf_json_tuple.q @@ -2,17 +2,17 @@ create table json_t (key string, jstring string); insert overwrite table json_t select * from ( - select '1', '{"f1": "value1", "f2": "value2", "f3": 3, "f5": 5.23}' from src limit 1 + select '1', '{"f1": "value1", "f2": "value2", "f3": 3, "f5": 5.23}' from src tablesample (1 rows) union all - select '2', '{"f1": "value12", "f3": "value3", "f2": 2, "f4": 4.01}' from src limit 1 + select '2', '{"f1": "value12", "f3": "value3", "f2": 2, "f4": 4.01}' from src tablesample (1 rows) union all - select '3', '{"f1": "value13", "f4": "value44", "f3": "value33", "f2": 2, "f5": 5.01}' from src limit 1 + select '3', '{"f1": "value13", "f4": "value44", "f3": "value33", "f2": 2, "f5": 5.01}' from src tablesample (1 rows) union all - select '4', cast(null as string) from src limit 1 + select '4', cast(null as string) from src tablesample (1 rows) union all - select '5', '{"f1": "", "f5": null}' from src limit 1 + select '5', '{"f1": "", "f5": null}' from src tablesample (1 rows) union all - select '6', '[invalid JSON string]' from src limit 1 + select '6', '[invalid JSON string]' from src tablesample (1 rows) ) s; explain @@ -40,7 +40,7 @@ select f2, count(*) from json_t a lateral view json_tuple(a.jstring, 'f1', 'f2', CREATE TABLE dest1(c1 STRING) STORED AS RCFILE; -INSERT OVERWRITE TABLE dest1 SELECT '{"a":"b\nc"}' FROM src LIMIT 1; +INSERT OVERWRITE TABLE dest1 SELECT '{"a":"b\nc"}' FROM src tablesample (1 rows); SELECT * FROM dest1; diff --git ql/src/test/queries/clientpositive/udtf_parse_url_tuple.q ql/src/test/queries/clientpositive/udtf_parse_url_tuple.q index 055e39b..0870cbc 100644 --- ql/src/test/queries/clientpositive/udtf_parse_url_tuple.q +++ ql/src/test/queries/clientpositive/udtf_parse_url_tuple.q @@ -2,17 +2,17 @@ create table url_t (key string, fullurl string); insert overwrite table url_t select * from ( - select '1', 'http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1' from src limit 1 + select '1', 'http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1' from src tablesample (1 rows) union all - select '2', 'https://www.socs.uts.edu.au:80/MosaicDocs-old/url-primer.html?k1=tps#chapter1' from src limit 1 + select '2', 'https://www.socs.uts.edu.au:80/MosaicDocs-old/url-primer.html?k1=tps#chapter1' from src tablesample (1 rows) union all - select '3', 
'ftp://sites.google.com/a/example.com/site/page' from src limit 1 + select '3', 'ftp://sites.google.com/a/example.com/site/page' from src tablesample (1 rows) union all - select '4', cast(null as string) from src limit 1 + select '4', cast(null as string) from src tablesample (1 rows) union all - select '5', 'htttp://' from src limit 1 + select '5', 'htttp://' from src tablesample (1 rows) union all - select '6', '[invalid url string]' from src limit 1 + select '6', '[invalid url string]' from src tablesample (1 rows) ) s; describe function parse_url_tuple; diff --git ql/src/test/results/clientpositive/decimal_udf.q.out ql/src/test/results/clientpositive/decimal_udf.q.out index 652d1a8..51c0426 100644 --- ql/src/test/results/clientpositive/decimal_udf.q.out +++ ql/src/test/results/clientpositive/decimal_udf.q.out @@ -29,33 +29,21 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (+ (TOK_TABLE_OR_COL key) (TOK_TABLE_OR_COL key)))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - decimal_udf - TableScan - alias: decimal_udf - Select Operator - expressions: - expr: (key + key) - type: decimal(65,30) - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: -1 - + Processor Tree: + TableScan + alias: decimal_udf + Select Operator + expressions: + expr: (key + key) + type: decimal(65,30) + outputColumnNames: _col0 + ListSink PREHOOK: query: SELECT key + key FROM DECIMAL_UDF PREHOOK: type: QUERY @@ -111,33 +99,21 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (+ (TOK_TABLE_OR_COL key) (TOK_TABLE_OR_COL value)))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - decimal_udf - TableScan - alias: decimal_udf - Select Operator - expressions: - expr: (key + value) - type: decimal(65,30) - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: -1 - + Processor Tree: + TableScan + alias: decimal_udf + Select Operator + expressions: + expr: (key + value) + type: decimal(65,30) + outputColumnNames: _col0 + ListSink PREHOOK: query: SELECT key + value FROM DECIMAL_UDF PREHOOK: type: QUERY @@ -193,33 +169,21 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (+ (TOK_TABLE_OR_COL key) (/ (TOK_TABLE_OR_COL value) 2)))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - decimal_udf - TableScan - alias: decimal_udf - Select Operator - expressions: - expr: (key + (value / 2)) - type: decimal(65,30) - outputColumnNames: _col0 - File Output Operator - 
compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: -1 - + Processor Tree: + TableScan + alias: decimal_udf + Select Operator + expressions: + expr: (key + (value / 2)) + type: decimal(65,30) + outputColumnNames: _col0 + ListSink PREHOOK: query: SELECT key + (value/2) FROM DECIMAL_UDF PREHOOK: type: QUERY @@ -275,33 +239,21 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (+ (TOK_TABLE_OR_COL key) '1.0'))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - decimal_udf - TableScan - alias: decimal_udf - Select Operator - expressions: - expr: (key + '1.0') - type: decimal(65,30) - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: -1 - + Processor Tree: + TableScan + alias: decimal_udf + Select Operator + expressions: + expr: (key + '1.0') + type: decimal(65,30) + outputColumnNames: _col0 + ListSink PREHOOK: query: SELECT key + '1.0' FROM DECIMAL_UDF PREHOOK: type: QUERY @@ -359,33 +311,21 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (- (TOK_TABLE_OR_COL key) (TOK_TABLE_OR_COL key)))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - decimal_udf - TableScan - alias: decimal_udf - Select Operator - expressions: - expr: (key - key) - type: decimal(65,30) - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: -1 - + Processor Tree: + TableScan + alias: decimal_udf + Select Operator + expressions: + expr: (key - key) + type: decimal(65,30) + outputColumnNames: _col0 + ListSink PREHOOK: query: SELECT key - key FROM DECIMAL_UDF PREHOOK: type: QUERY @@ -441,33 +381,21 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (- (TOK_TABLE_OR_COL key) (TOK_TABLE_OR_COL value)))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - decimal_udf - TableScan - alias: decimal_udf - Select Operator - expressions: - expr: (key - value) - type: decimal(65,30) - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: -1 - + Processor Tree: + TableScan + alias: 
decimal_udf + Select Operator + expressions: + expr: (key - value) + type: decimal(65,30) + outputColumnNames: _col0 + ListSink PREHOOK: query: SELECT key - value FROM DECIMAL_UDF PREHOOK: type: QUERY @@ -523,33 +451,21 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (- (TOK_TABLE_OR_COL key) (/ (TOK_TABLE_OR_COL value) 2)))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - decimal_udf - TableScan - alias: decimal_udf - Select Operator - expressions: - expr: (key - (value / 2)) - type: decimal(65,30) - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: -1 - + Processor Tree: + TableScan + alias: decimal_udf + Select Operator + expressions: + expr: (key - (value / 2)) + type: decimal(65,30) + outputColumnNames: _col0 + ListSink PREHOOK: query: SELECT key - (value/2) FROM DECIMAL_UDF PREHOOK: type: QUERY @@ -605,33 +521,21 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (- (TOK_TABLE_OR_COL key) '1.0'))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - decimal_udf - TableScan - alias: decimal_udf - Select Operator - expressions: - expr: (key - '1.0') - type: decimal(65,30) - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: -1 - + Processor Tree: + TableScan + alias: decimal_udf + Select Operator + expressions: + expr: (key - '1.0') + type: decimal(65,30) + outputColumnNames: _col0 + ListSink PREHOOK: query: SELECT key - '1.0' FROM DECIMAL_UDF PREHOOK: type: QUERY @@ -689,33 +593,21 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (* (TOK_TABLE_OR_COL key) (TOK_TABLE_OR_COL key)))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - decimal_udf - TableScan - alias: decimal_udf - Select Operator - expressions: - expr: (key * key) - type: decimal(65,30) - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: -1 - + Processor Tree: + TableScan + alias: decimal_udf + Select Operator + expressions: + expr: (key * key) + type: decimal(65,30) + outputColumnNames: _col0 + ListSink PREHOOK: query: SELECT key * key FROM DECIMAL_UDF PREHOOK: type: QUERY @@ -771,33 +663,21 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) 
(TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (* (TOK_TABLE_OR_COL key) (TOK_TABLE_OR_COL value)))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - decimal_udf - TableScan - alias: decimal_udf - Select Operator - expressions: - expr: (key * value) - type: decimal(65,30) - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: -1 - + Processor Tree: + TableScan + alias: decimal_udf + Select Operator + expressions: + expr: (key * value) + type: decimal(65,30) + outputColumnNames: _col0 + ListSink PREHOOK: query: SELECT key * value FROM DECIMAL_UDF PREHOOK: type: QUERY @@ -853,33 +733,21 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (* (TOK_TABLE_OR_COL key) (/ (TOK_TABLE_OR_COL value) 2)))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - decimal_udf - TableScan - alias: decimal_udf - Select Operator - expressions: - expr: (key * (value / 2)) - type: decimal(65,30) - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: -1 - + Processor Tree: + TableScan + alias: decimal_udf + Select Operator + expressions: + expr: (key * (value / 2)) + type: decimal(65,30) + outputColumnNames: _col0 + ListSink PREHOOK: query: SELECT key * (value/2) FROM DECIMAL_UDF PREHOOK: type: QUERY @@ -935,33 +803,21 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (* (TOK_TABLE_OR_COL key) '2.0'))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - decimal_udf - TableScan - alias: decimal_udf - Select Operator - expressions: - expr: (key * '2.0') - type: decimal(65,30) - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: -1 - + Processor Tree: + TableScan + alias: decimal_udf + Select Operator + expressions: + expr: (key * '2.0') + type: decimal(65,30) + outputColumnNames: _col0 + ListSink PREHOOK: query: SELECT key * '2.0' FROM DECIMAL_UDF PREHOOK: type: QUERY @@ -1019,34 +875,22 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (/ (TOK_TABLE_OR_COL key) 0))) (TOK_LIMIT 1))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - decimal_udf - TableScan - alias: 
decimal_udf - Select Operator - expressions: - expr: (key / 0) - type: decimal(65,30) - outputColumnNames: _col0 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: 1 - + Processor Tree: + TableScan + alias: decimal_udf + Select Operator + expressions: + expr: (key / 0) + type: decimal(65,30) + outputColumnNames: _col0 + Limit + ListSink PREHOOK: query: SELECT key / 0 FROM DECIMAL_UDF limit 1 PREHOOK: type: QUERY @@ -1065,34 +909,22 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (/ (TOK_TABLE_OR_COL key) TOK_NULL))) (TOK_LIMIT 1))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - decimal_udf - TableScan - alias: decimal_udf - Select Operator - expressions: - expr: (key / null) - type: decimal(65,30) - outputColumnNames: _col0 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: 1 - + Processor Tree: + TableScan + alias: decimal_udf + Select Operator + expressions: + expr: (key / null) + type: decimal(65,30) + outputColumnNames: _col0 + Limit + ListSink PREHOOK: query: SELECT key / NULL FROM DECIMAL_UDF limit 1 PREHOOK: type: QUERY @@ -1111,37 +943,25 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (/ (TOK_TABLE_OR_COL key) (TOK_TABLE_OR_COL key)))) (TOK_WHERE (and (TOK_FUNCTION TOK_ISNOTNULL (TOK_TABLE_OR_COL key)) (<> (TOK_TABLE_OR_COL key) 0))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - decimal_udf - TableScan - alias: decimal_udf - Filter Operator - predicate: - expr: (key is not null and (key <> 0)) - type: boolean - Select Operator - expressions: - expr: (key / key) - type: decimal(65,30) - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: -1 - + Processor Tree: + TableScan + alias: decimal_udf + Filter Operator + predicate: + expr: (key is not null and (key <> 0)) + type: boolean + Select Operator + expressions: + expr: (key / key) + type: decimal(65,30) + outputColumnNames: _col0 + ListSink PREHOOK: query: SELECT key / key FROM DECIMAL_UDF WHERE key is not null and key <> 0 PREHOOK: type: QUERY @@ -1193,37 +1013,25 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (/ (TOK_TABLE_OR_COL key) (TOK_TABLE_OR_COL value)))) (TOK_WHERE (and (TOK_FUNCTION TOK_ISNOTNULL (TOK_TABLE_OR_COL value)) (<> (TOK_TABLE_OR_COL value) 0))))) STAGE DEPENDENCIES: - Stage-1 is a root stage 
Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - decimal_udf - TableScan - alias: decimal_udf - Filter Operator - predicate: - expr: (value is not null and (value <> 0)) - type: boolean - Select Operator - expressions: - expr: (key / value) - type: decimal(65,30) - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: -1 - + Processor Tree: + TableScan + alias: decimal_udf + Filter Operator + predicate: + expr: (value is not null and (value <> 0)) + type: boolean + Select Operator + expressions: + expr: (key / value) + type: decimal(65,30) + outputColumnNames: _col0 + ListSink PREHOOK: query: SELECT key / value FROM DECIMAL_UDF WHERE value is not null and value <> 0 PREHOOK: type: QUERY @@ -1265,37 +1073,25 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (/ (TOK_TABLE_OR_COL key) (/ (TOK_TABLE_OR_COL value) 2)))) (TOK_WHERE (and (TOK_FUNCTION TOK_ISNOTNULL (TOK_TABLE_OR_COL value)) (<> (TOK_TABLE_OR_COL value) 0))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - decimal_udf - TableScan - alias: decimal_udf - Filter Operator - predicate: - expr: (value is not null and (value <> 0)) - type: boolean - Select Operator - expressions: - expr: (key / (value / 2)) - type: decimal(65,30) - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: -1 - + Processor Tree: + TableScan + alias: decimal_udf + Filter Operator + predicate: + expr: (value is not null and (value <> 0)) + type: boolean + Select Operator + expressions: + expr: (key / (value / 2)) + type: decimal(65,30) + outputColumnNames: _col0 + ListSink PREHOOK: query: SELECT key / (value/2) FROM DECIMAL_UDF WHERE value is not null and value <> 0 PREHOOK: type: QUERY @@ -1337,33 +1133,21 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (/ (TOK_TABLE_OR_COL key) '2.0'))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - decimal_udf - TableScan - alias: decimal_udf - Select Operator - expressions: - expr: (key / '2.0') - type: decimal(65,30) - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: -1 - + Processor Tree: + TableScan + alias: decimal_udf + Select Operator + expressions: + expr: (key / '2.0') + type: decimal(65,30) + outputColumnNames: _col0 + ListSink PREHOOK: query: SELECT key / '2.0' FROM DECIMAL_UDF PREHOOK: type: QUERY @@ -1421,33 +1205,21 @@ ABSTRACT 
SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION abs (TOK_TABLE_OR_COL key)))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - decimal_udf - TableScan - alias: decimal_udf - Select Operator - expressions: - expr: abs(key) - type: decimal(65,30) - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: -1 - + Processor Tree: + TableScan + alias: decimal_udf + Select Operator + expressions: + expr: abs(key) + type: decimal(65,30) + outputColumnNames: _col0 + ListSink PREHOOK: query: SELECT abs(key) FROM DECIMAL_UDF PREHOOK: type: QUERY @@ -1611,7 +1383,6 @@ STAGE PLANS: Fetch Operator limit: -1 - PREHOOK: query: SELECT value, sum(key) / count(key), avg(key) FROM DECIMAL_UDF GROUP BY value ORDER BY value PREHOOK: type: QUERY PREHOOK: Input: default@decimal_udf @@ -1647,33 +1418,21 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (- (TOK_TABLE_OR_COL key)))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - decimal_udf - TableScan - alias: decimal_udf - Select Operator - expressions: - expr: (- key) - type: decimal(65,30) - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: -1 - + Processor Tree: + TableScan + alias: decimal_udf + Select Operator + expressions: + expr: (- key) + type: decimal(65,30) + outputColumnNames: _col0 + ListSink PREHOOK: query: SELECT -key FROM DECIMAL_UDF PREHOOK: type: QUERY @@ -1731,33 +1490,21 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (+ (TOK_TABLE_OR_COL key)))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - decimal_udf - TableScan - alias: decimal_udf - Select Operator - expressions: - expr: key - type: decimal(65,30) - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: -1 - + Processor Tree: + TableScan + alias: decimal_udf + Select Operator + expressions: + expr: key + type: decimal(65,30) + outputColumnNames: _col0 + ListSink PREHOOK: query: SELECT +key FROM DECIMAL_UDF PREHOOK: type: QUERY @@ -1815,33 +1562,21 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION CEIL (TOK_TABLE_OR_COL 
key)))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - decimal_udf - TableScan - alias: decimal_udf - Select Operator - expressions: - expr: ceil(key) - type: decimal(65,30) - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: -1 - + Processor Tree: + TableScan + alias: decimal_udf + Select Operator + expressions: + expr: ceil(key) + type: decimal(65,30) + outputColumnNames: _col0 + ListSink PREHOOK: query: SELECT CEIL(key) FROM DECIMAL_UDF PREHOOK: type: QUERY @@ -1899,33 +1634,21 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION FLOOR (TOK_TABLE_OR_COL key)))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - decimal_udf - TableScan - alias: decimal_udf - Select Operator - expressions: - expr: floor(key) - type: decimal(65,30) - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: -1 - + Processor Tree: + TableScan + alias: decimal_udf + Select Operator + expressions: + expr: floor(key) + type: decimal(65,30) + outputColumnNames: _col0 + ListSink PREHOOK: query: SELECT FLOOR(key) FROM DECIMAL_UDF PREHOOK: type: QUERY @@ -1983,33 +1706,21 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION ROUND (TOK_TABLE_OR_COL key) 2))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - decimal_udf - TableScan - alias: decimal_udf - Select Operator - expressions: - expr: round(key, 2) - type: decimal(65,30) - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: -1 - + Processor Tree: + TableScan + alias: decimal_udf + Select Operator + expressions: + expr: round(key, 2) + type: decimal(65,30) + outputColumnNames: _col0 + ListSink PREHOOK: query: SELECT ROUND(key, 2) FROM DECIMAL_UDF PREHOOK: type: QUERY @@ -2067,33 +1778,21 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION POWER (TOK_TABLE_OR_COL key) 2))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - decimal_udf - TableScan - alias: decimal_udf - Select Operator - expressions: - expr: power(key, 2) - type: decimal(65,30) - outputColumnNames: _col0 - File Output Operator - 
compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: -1 - + Processor Tree: + TableScan + alias: decimal_udf + Select Operator + expressions: + expr: power(key, 2) + type: decimal(65,30) + outputColumnNames: _col0 + ListSink PREHOOK: query: SELECT POWER(key, 2) FROM DECIMAL_UDF PREHOOK: type: QUERY @@ -2151,33 +1850,21 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (% (+ (TOK_TABLE_OR_COL key) 1) (/ (TOK_TABLE_OR_COL key) 2)))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - decimal_udf - TableScan - alias: decimal_udf - Select Operator - expressions: - expr: ((key + 1) % (key / 2)) - type: decimal(65,30) - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: -1 - + Processor Tree: + TableScan + alias: decimal_udf + Select Operator + expressions: + expr: ((key + 1) % (key / 2)) + type: decimal(65,30) + outputColumnNames: _col0 + ListSink PREHOOK: query: SELECT (key + 1) % (key / 2) FROM DECIMAL_UDF PREHOOK: type: QUERY @@ -2308,7 +1995,6 @@ STAGE PLANS: Fetch Operator limit: -1 - PREHOOK: query: SELECT value, stddev(key), variance(key) FROM DECIMAL_UDF GROUP BY value PREHOOK: type: QUERY PREHOOK: Input: default@decimal_udf @@ -2417,7 +2103,6 @@ STAGE PLANS: Fetch Operator limit: -1 - PREHOOK: query: SELECT value, stddev_samp(key), var_samp(key) FROM DECIMAL_UDF GROUP BY value PREHOOK: type: QUERY PREHOOK: Input: default@decimal_udf @@ -2504,7 +2189,6 @@ STAGE PLANS: Fetch Operator limit: -1 - PREHOOK: query: SELECT histogram_numeric(key, 3) FROM DECIMAL_UDF PREHOOK: type: QUERY PREHOOK: Input: default@decimal_udf @@ -2575,7 +2259,6 @@ STAGE PLANS: Fetch Operator limit: -1 - PREHOOK: query: SELECT MIN(key) FROM DECIMAL_UDF PREHOOK: type: QUERY PREHOOK: Input: default@decimal_udf @@ -2646,7 +2329,6 @@ STAGE PLANS: Fetch Operator limit: -1 - PREHOOK: query: SELECT MAX(key) FROM DECIMAL_UDF PREHOOK: type: QUERY PREHOOK: Input: default@decimal_udf @@ -2717,7 +2399,6 @@ STAGE PLANS: Fetch Operator limit: -1 - PREHOOK: query: SELECT COUNT(key) FROM DECIMAL_UDF PREHOOK: type: QUERY PREHOOK: Input: default@decimal_udf diff --git ql/src/test/results/clientpositive/timestamp_1.q.out ql/src/test/results/clientpositive/timestamp_1.q.out index cf62457..e3edde3 100644 --- ql/src/test/results/clientpositive/timestamp_1.q.out +++ ql/src/test/results/clientpositive/timestamp_1.q.out @@ -16,12 +16,12 @@ POSTHOOK: type: ALTERTABLE_SERIALIZER POSTHOOK: Input: default@timestamp_1 POSTHOOK: Output: default@timestamp_1 PREHOOK: query: insert overwrite table timestamp_1 - select cast('2011-01-01 01:01:01' as timestamp) from src limit 1 + select cast('2011-01-01 01:01:01' as timestamp) from src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@timestamp_1 POSTHOOK: query: insert overwrite table timestamp_1 - select cast('2011-01-01 01:01:01' as timestamp) from src 
limit 1 + select cast('2011-01-01 01:01:01' as timestamp) from src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@timestamp_1 @@ -107,12 +107,12 @@ POSTHOOK: Input: default@timestamp_1 POSTHOOK: Lineage: timestamp_1.t EXPRESSION [] 2011-01-01 01:01:01 PREHOOK: query: insert overwrite table timestamp_1 - select '2011-01-01 01:01:01' from src limit 1 + select '2011-01-01 01:01:01' from src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@timestamp_1 POSTHOOK: query: insert overwrite table timestamp_1 - select '2011-01-01 01:01:01' from src limit 1 + select '2011-01-01 01:01:01' from src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@timestamp_1 @@ -207,12 +207,12 @@ POSTHOOK: Lineage: timestamp_1.t EXPRESSION [] POSTHOOK: Lineage: timestamp_1.t EXPRESSION [] 2011-01-01 01:01:01 PREHOOK: query: insert overwrite table timestamp_1 - select '2011-01-01 01:01:01.1' from src limit 1 + select '2011-01-01 01:01:01.1' from src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@timestamp_1 POSTHOOK: query: insert overwrite table timestamp_1 - select '2011-01-01 01:01:01.1' from src limit 1 + select '2011-01-01 01:01:01.1' from src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@timestamp_1 @@ -316,12 +316,12 @@ POSTHOOK: Lineage: timestamp_1.t EXPRESSION [] POSTHOOK: Lineage: timestamp_1.t EXPRESSION [] 2011-01-01 01:01:01.1 PREHOOK: query: insert overwrite table timestamp_1 - select '2011-01-01 01:01:01.0001' from src limit 1 + select '2011-01-01 01:01:01.0001' from src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@timestamp_1 POSTHOOK: query: insert overwrite table timestamp_1 - select '2011-01-01 01:01:01.0001' from src limit 1 + select '2011-01-01 01:01:01.0001' from src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@timestamp_1 @@ -434,12 +434,12 @@ POSTHOOK: Lineage: timestamp_1.t EXPRESSION [] POSTHOOK: Lineage: timestamp_1.t EXPRESSION [] 2011-01-01 01:01:01.0001 PREHOOK: query: insert overwrite table timestamp_1 - select '2011-01-01 01:01:01.000100000' from src limit 1 + select '2011-01-01 01:01:01.000100000' from src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@timestamp_1 POSTHOOK: query: insert overwrite table timestamp_1 - select '2011-01-01 01:01:01.000100000' from src limit 1 + select '2011-01-01 01:01:01.000100000' from src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@timestamp_1 @@ -561,12 +561,12 @@ POSTHOOK: Lineage: timestamp_1.t EXPRESSION [] POSTHOOK: Lineage: timestamp_1.t EXPRESSION [] 2011-01-01 01:01:01.0001 PREHOOK: query: insert overwrite table timestamp_1 - select '2011-01-01 01:01:01.001000011' from src limit 1 + select '2011-01-01 01:01:01.001000011' from src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@timestamp_1 POSTHOOK: query: insert overwrite table timestamp_1 - select '2011-01-01 01:01:01.001000011' from src limit 1 + select '2011-01-01 01:01:01.001000011' from src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@timestamp_1 diff --git ql/src/test/results/clientpositive/timestamp_2.q.out 
ql/src/test/results/clientpositive/timestamp_2.q.out index 1422bac..9ef6239 100644 --- ql/src/test/results/clientpositive/timestamp_2.q.out +++ ql/src/test/results/clientpositive/timestamp_2.q.out @@ -16,12 +16,12 @@ POSTHOOK: type: ALTERTABLE_SERIALIZER POSTHOOK: Input: default@timestamp_2 POSTHOOK: Output: default@timestamp_2 PREHOOK: query: insert overwrite table timestamp_2 - select cast('2011-01-01 01:01:01' as timestamp) from src limit 1 + select cast('2011-01-01 01:01:01' as timestamp) from src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@timestamp_2 POSTHOOK: query: insert overwrite table timestamp_2 - select cast('2011-01-01 01:01:01' as timestamp) from src limit 1 + select cast('2011-01-01 01:01:01' as timestamp) from src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@timestamp_2 @@ -107,12 +107,12 @@ POSTHOOK: Input: default@timestamp_2 POSTHOOK: Lineage: timestamp_2.t EXPRESSION [] 2011-01-01 01:01:01 PREHOOK: query: insert overwrite table timestamp_2 - select '2011-01-01 01:01:01' from src limit 1 + select '2011-01-01 01:01:01' from src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@timestamp_2 POSTHOOK: query: insert overwrite table timestamp_2 - select '2011-01-01 01:01:01' from src limit 1 + select '2011-01-01 01:01:01' from src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@timestamp_2 @@ -207,12 +207,12 @@ POSTHOOK: Lineage: timestamp_2.t EXPRESSION [] POSTHOOK: Lineage: timestamp_2.t EXPRESSION [] 2011-01-01 01:01:01 PREHOOK: query: insert overwrite table timestamp_2 - select '2011-01-01 01:01:01.1' from src limit 1 + select '2011-01-01 01:01:01.1' from src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@timestamp_2 POSTHOOK: query: insert overwrite table timestamp_2 - select '2011-01-01 01:01:01.1' from src limit 1 + select '2011-01-01 01:01:01.1' from src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@timestamp_2 @@ -316,12 +316,12 @@ POSTHOOK: Lineage: timestamp_2.t EXPRESSION [] POSTHOOK: Lineage: timestamp_2.t EXPRESSION [] 2011-01-01 01:01:01.1 PREHOOK: query: insert overwrite table timestamp_2 - select '2011-01-01 01:01:01.0001' from src limit 1 + select '2011-01-01 01:01:01.0001' from src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@timestamp_2 POSTHOOK: query: insert overwrite table timestamp_2 - select '2011-01-01 01:01:01.0001' from src limit 1 + select '2011-01-01 01:01:01.0001' from src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@timestamp_2 @@ -434,12 +434,12 @@ POSTHOOK: Lineage: timestamp_2.t EXPRESSION [] POSTHOOK: Lineage: timestamp_2.t EXPRESSION [] 2011-01-01 01:01:01.0001 PREHOOK: query: insert overwrite table timestamp_2 - select '2011-01-01 01:01:01.000100000' from src limit 1 + select '2011-01-01 01:01:01.000100000' from src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@timestamp_2 POSTHOOK: query: insert overwrite table timestamp_2 - select '2011-01-01 01:01:01.000100000' from src limit 1 + select '2011-01-01 01:01:01.000100000' from src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@timestamp_2 @@ -561,12 +561,12 @@ POSTHOOK: Lineage: timestamp_2.t 
 EXPRESSION []
 POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
 2011-01-01 01:01:01.0001
 PREHOOK: query: insert overwrite table timestamp_2
-  select '2011-01-01 01:01:01.001000011' from src limit 1
+  select '2011-01-01 01:01:01.001000011' from src tablesample (1 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@timestamp_2
 POSTHOOK: query: insert overwrite table timestamp_2
-  select '2011-01-01 01:01:01.001000011' from src limit 1
+  select '2011-01-01 01:01:01.001000011' from src tablesample (1 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@timestamp_2
diff --git ql/src/test/results/clientpositive/timestamp_3.q.out ql/src/test/results/clientpositive/timestamp_3.q.out
index 0a042fb..30adea5 100644
--- ql/src/test/results/clientpositive/timestamp_3.q.out
+++ ql/src/test/results/clientpositive/timestamp_3.q.out
@@ -16,12 +16,12 @@ POSTHOOK: type: ALTERTABLE_SERIALIZER
 POSTHOOK: Input: default@timestamp_3
 POSTHOOK: Output: default@timestamp_3
 PREHOOK: query: insert overwrite table timestamp_3
-  select cast(cast('1.3041352164485E9' as double) as timestamp) from src limit 1
+  select cast(cast('1.3041352164485E9' as double) as timestamp) from src tablesample (1 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@timestamp_3
 POSTHOOK: query: insert overwrite table timestamp_3
-  select cast(cast('1.3041352164485E9' as double) as timestamp) from src limit 1
+  select cast(cast('1.3041352164485E9' as double) as timestamp) from src tablesample (1 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@timestamp_3
diff --git ql/src/test/results/clientpositive/timestamp_lazy.q.out ql/src/test/results/clientpositive/timestamp_lazy.q.out
index fff9784..62a5ab7 100644
--- ql/src/test/results/clientpositive/timestamp_lazy.q.out
+++ ql/src/test/results/clientpositive/timestamp_lazy.q.out
@@ -7,11 +7,11 @@ PREHOOK: type: CREATETABLE
 POSTHOOK: query: create table timestamp_lazy (t timestamp, key string, value string)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@timestamp_lazy
-PREHOOK: query: insert overwrite table timestamp_lazy select cast('2011-01-01 01:01:01' as timestamp), key, value from src limit 5
+PREHOOK: query: insert overwrite table timestamp_lazy select cast('2011-01-01 01:01:01' as timestamp), key, value from src tablesample (5 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@timestamp_lazy
-POSTHOOK: query: insert overwrite table timestamp_lazy select cast('2011-01-01 01:01:01' as timestamp), key, value from src limit 5
+POSTHOOK: query: insert overwrite table timestamp_lazy select cast('2011-01-01 01:01:01' as timestamp), key, value from src tablesample (5 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@timestamp_lazy
diff --git ql/src/test/results/clientpositive/timestamp_udf.q.out ql/src/test/results/clientpositive/timestamp_udf.q.out
index c04de7a..8d16c48 100644
--- ql/src/test/results/clientpositive/timestamp_udf.q.out
+++ ql/src/test/results/clientpositive/timestamp_udf.q.out
@@ -16,20 +16,20 @@ PREHOOK: type: CREATETABLE
 POSTHOOK: query: create table timestamp_udf_string (t string)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@timestamp_udf_string
-PREHOOK: query: from src
+PREHOOK: query: from (select * from src tablesample (1 rows)) s
 insert overwrite table timestamp_udf
-  select '2011-05-06 07:08:09.1234567' limit 1
+  select '2011-05-06 07:08:09.1234567'
 insert overwrite table timestamp_udf_string
-  select '2011-05-06 07:08:09.1234567' limit 1
+  select '2011-05-06 07:08:09.1234567'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@timestamp_udf
 PREHOOK: Output: default@timestamp_udf_string
-POSTHOOK: query: from src
+POSTHOOK: query: from (select * from src tablesample (1 rows)) s
 insert overwrite table timestamp_udf
-  select '2011-05-06 07:08:09.1234567' limit 1
+  select '2011-05-06 07:08:09.1234567'
 insert overwrite table timestamp_udf_string
-  select '2011-05-06 07:08:09.1234567' limit 1
+  select '2011-05-06 07:08:09.1234567'
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@timestamp_udf
diff --git ql/src/test/results/clientpositive/udf_get_json_object.q.out ql/src/test/results/clientpositive/udf_get_json_object.q.out
index a7b37f1..28069e6 100644
--- ql/src/test/results/clientpositive/udf_get_json_object.q.out
+++ ql/src/test/results/clientpositive/udf_get_json_object.q.out
@@ -48,33 +48,21 @@ ABSTRACT SYNTAX TREE:
 #### A masked pattern was here ####
 STAGE DEPENDENCIES:
-  Stage-1 is a root stage
   Stage-0 is a root stage
 
 STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Alias -> Map Operator Tree:
-        src_json
-          TableScan
-            alias: src_json
-            Select Operator
-              expressions:
-#### A masked pattern was here ####
-                    type: string
-              outputColumnNames: _col0
-              File Output Operator
-                compressed: false
-                GlobalTableId: 0
-                table:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
   Stage: Stage-0
     Fetch Operator
       limit: -1
-
+      Processor Tree:
+        TableScan
+          alias: src_json
+          Select Operator
+            expressions:
+#### A masked pattern was here ####
+                  type: string
+            outputColumnNames: _col0
+            ListSink
 
 PREHOOK: query: SELECT get_json_object(src_json.json, '$') FROM src_json
 PREHOOK: type: QUERY
@@ -184,11 +172,11 @@ CREATE TABLE dest2(c1 STRING) STORED AS RCFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@dest2
 POSTHOOK: Lineage: dest1.c1 SIMPLE []
-PREHOOK: query: INSERT OVERWRITE TABLE dest2 SELECT '{"a":"b\nc"}' FROM src LIMIT 1
+PREHOOK: query: INSERT OVERWRITE TABLE dest2 SELECT '{"a":"b\nc"}' FROM src tablesample (1 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@dest2
-POSTHOOK: query: INSERT OVERWRITE TABLE dest2 SELECT '{"a":"b\nc"}' FROM src LIMIT 1
+POSTHOOK: query: INSERT OVERWRITE TABLE dest2 SELECT '{"a":"b\nc"}' FROM src tablesample (1 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@dest2
diff --git ql/src/test/results/clientpositive/udf_printf.q.out ql/src/test/results/clientpositive/udf_printf.q.out
index 9cc2158..cd9fda6 100644
--- ql/src/test/results/clientpositive/udf_printf.q.out
+++ ql/src/test/results/clientpositive/udf_printf.q.out
@@ -20,75 +20,64 @@ Example:
   > SELECT printf("Hello World %d %s", 100, "days")FROM src LIMIT 1;
   "Hello World 100 days"
 PREHOOK: query: EXPLAIN
-SELECT printf("Hello World %d %s", 100, "days") FROM src LIMIT 1
+SELECT printf("Hello World %d %s", 100, "days") FROM src tablesample (1 rows)
 PREHOOK: type: QUERY
 POSTHOOK: query: EXPLAIN
-SELECT printf("Hello World %d %s", 100, "days") FROM src LIMIT 1
+SELECT printf("Hello World %d %s", 100, "days") FROM src tablesample (1 rows)
 POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
-  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION printf "Hello World %d %s" 100 "days"))) (TOK_LIMIT 1)))
+  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src) (TOK_TABLESPLITSAMPLE TOK_ROWCOUNT 1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION printf "Hello World %d %s" 100 "days")))))
 
 STAGE DEPENDENCIES:
-  Stage-1 is a root stage
   Stage-0 is a root stage
 
 STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Alias -> Map Operator Tree:
-        src
-          TableScan
-            alias: src
-            Select Operator
-              expressions:
-                    expr: printf('Hello World %d %s', 100, 'days')
-                    type: string
-              outputColumnNames: _col0
-              Limit
-                File Output Operator
-                  compressed: false
-                  GlobalTableId: 0
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
   Stage: Stage-0
     Fetch Operator
-      limit: 1
-
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: src
+          Row Limit Per Split: 1
+          Select Operator
+            expressions:
+                  expr: printf('Hello World %d %s', 100, 'days')
+                  type: string
+            outputColumnNames: _col0
+            ListSink
 
 PREHOOK: query: -- Test Primitive Types
-SELECT printf("Hello World %d %s", 100, "days") FROM src LIMIT 1
+SELECT printf("Hello World %d %s", 100, "days") FROM src tablesample (1 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 #### A masked pattern was here ####
 POSTHOOK: query: -- Test Primitive Types
-SELECT printf("Hello World %d %s", 100, "days") FROM src LIMIT 1
+SELECT printf("Hello World %d %s", 100, "days") FROM src tablesample (1 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 #### A masked pattern was here ####
 Hello World 100 days
-PREHOOK: query: SELECT printf("All Type Test: %b, %c, %d, %e, %+10.4f, %g, %h, %s, %a", false, 65, 15000, 12.3400, 27183.240051, 2300.41, 50, "corret", 256.125) FROM src LIMIT 1
+PREHOOK: query: SELECT printf("All Type Test: %b, %c, %d, %e, %+10.4f, %g, %h, %s, %a", false, 65, 15000, 12.3400, 27183.240051, 2300.41, 50, "corret", 256.125) FROM src tablesample (1 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 #### A masked pattern was here ####
-POSTHOOK: query: SELECT printf("All Type Test: %b, %c, %d, %e, %+10.4f, %g, %h, %s, %a", false, 65, 15000, 12.3400, 27183.240051, 2300.41, 50, "corret", 256.125) FROM src LIMIT 1
+POSTHOOK: query: SELECT printf("All Type Test: %b, %c, %d, %e, %+10.4f, %g, %h, %s, %a", false, 65, 15000, 12.3400, 27183.240051, 2300.41, 50, "corret", 256.125) FROM src tablesample (1 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 #### A masked pattern was here ####
 All Type Test: false, A, 15000, 1.234000e+01, +27183.2401, 2300.41, 32, corret, 0x1.002p8
 PREHOOK: query: -- Test NULL Values
-SELECT printf("Color %s, String Null: %s, number1 %d, number2 %05d, Integer Null: %d, hex %#x, float %5.2f Double Null: %f\n", "red", NULL, 123456, 89, NULL, 255, 3.14159, NULL) FROM src LIMIT 1
+SELECT printf("Color %s, String Null: %s, number1 %d, number2 %05d, Integer Null: %d, hex %#x, float %5.2f Double Null: %f\n", "red", NULL, 123456, 89, NULL, 255, 3.14159, NULL) FROM src tablesample (1 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 #### A masked pattern was here ####
 POSTHOOK: query: -- Test NULL Values
-SELECT printf("Color %s, String Null: %s, number1 %d, number2 %05d, Integer Null: %d, hex %#x, float %5.2f Double Null: %f\n", "red", NULL, 123456, 89, NULL, 255, 3.14159, NULL) FROM src LIMIT 1
+SELECT printf("Color %s, String Null: %s, number1 %d, number2 %05d, Integer Null: %d, hex %#x, float %5.2f Double Null: %f\n", "red", NULL, 123456, 89, NULL, 255, 3.14159, NULL) FROM src tablesample (1 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 #### A masked pattern was here ####
 Color red, String Null: null, number1 123456, number2 00089, Integer Null: null, hex 0xff, float  3.14 Double Null: null
+
 PREHOOK: query: -- Test Timestamp
 create table timestamp_udf (t timestamp)
 PREHOOK: type: CREATETABLE
@@ -96,15 +85,15 @@ POSTHOOK: query: -- Test Timestamp
 create table timestamp_udf (t timestamp)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@timestamp_udf
-PREHOOK: query: from src
+PREHOOK: query: from (select * from src tablesample (1 rows)) s
 insert overwrite table timestamp_udf
-  select '2011-05-06 07:08:09.1234567' limit 1
+  select '2011-05-06 07:08:09.1234567'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@timestamp_udf
-POSTHOOK: query: from src
+POSTHOOK: query: from (select * from src tablesample (1 rows)) s
 insert overwrite table timestamp_udf
-  select '2011-05-06 07:08:09.1234567' limit 1
+  select '2011-05-06 07:08:09.1234567'
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@timestamp_udf
diff --git ql/src/test/results/clientpositive/udtf_json_tuple.q.out ql/src/test/results/clientpositive/udtf_json_tuple.q.out
index 80f0725..1a480b6 100644
--- ql/src/test/results/clientpositive/udtf_json_tuple.q.out
+++ ql/src/test/results/clientpositive/udtf_json_tuple.q.out
@@ -5,34 +5,34 @@ POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@json_t
 PREHOOK: query: insert overwrite table json_t
 select * from (
-  select '1', '{"f1": "value1", "f2": "value2", "f3": 3, "f5": 5.23}' from src limit 1
+  select '1', '{"f1": "value1", "f2": "value2", "f3": 3, "f5": 5.23}' from src tablesample (1 rows)
 union all
-  select '2', '{"f1": "value12", "f3": "value3", "f2": 2, "f4": 4.01}' from src limit 1
+  select '2', '{"f1": "value12", "f3": "value3", "f2": 2, "f4": 4.01}' from src tablesample (1 rows)
 union all
-  select '3', '{"f1": "value13", "f4": "value44", "f3": "value33", "f2": 2, "f5": 5.01}' from src limit 1
+  select '3', '{"f1": "value13", "f4": "value44", "f3": "value33", "f2": 2, "f5": 5.01}' from src tablesample (1 rows)
 union all
-  select '4', cast(null as string) from src limit 1
+  select '4', cast(null as string) from src tablesample (1 rows)
 union all
-  select '5', '{"f1": "", "f5": null}' from src limit 1
+  select '5', '{"f1": "", "f5": null}' from src tablesample (1 rows)
 union all
-  select '6', '[invalid JSON string]' from src limit 1
+  select '6', '[invalid JSON string]' from src tablesample (1 rows)
 ) s
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@json_t
 POSTHOOK: query: insert overwrite table json_t
 select * from (
-  select '1', '{"f1": "value1", "f2": "value2", "f3": 3, "f5": 5.23}' from src limit 1
+  select '1', '{"f1": "value1", "f2": "value2", "f3": 3, "f5": 5.23}' from src tablesample (1 rows)
 union all
-  select '2', '{"f1": "value12", "f3": "value3", "f2": 2, "f4": 4.01}' from src limit 1
+  select '2', '{"f1": "value12", "f3": "value3", "f2": 2, "f4": 4.01}' from src tablesample (1 rows)
 union all
-  select '3', '{"f1": "value13", "f4": "value44", "f3": "value33", "f2": 2, "f5": 5.01}' from src limit 1
+  select '3', '{"f1": "value13", "f4": "value44", "f3": "value33", "f2": 2, "f5": 5.01}' from src tablesample (1 rows)
 union all
-  select '4', cast(null as string) from src limit 1
+  select '4', cast(null as string) from src tablesample (1 rows)
 union all
-  select '5', '{"f1": "", "f5": null}' from src limit 1
+  select '5', '{"f1": "", "f5": null}' from src tablesample (1 rows)
 union all
-  select '6', '[invalid JSON string]' from src limit 1
+  select '6', '[invalid JSON string]' from src tablesample (1 rows)
 ) s
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
@@ -170,7 +170,6 @@ STAGE PLANS:
     Fetch Operator
       limit: -1
 
-
 PREHOOK: query: select a.key, b.* from json_t a lateral view json_tuple(a.jstring, 'f1', 'f2', 'f3', 'f4', 'f5') b as f1, f2, f3, f4, f5 order by a.key
 PREHOOK: type: QUERY
 PREHOOK: Input: default@json_t
@@ -261,7 +260,6 @@ STAGE PLANS:
     Fetch Operator
       limit: -1
 
-
 PREHOOK: query: select json_tuple(a.jstring, 'f1', 'f2', 'f3', 'f4', 'f5') as (f1, f2, f3, f4, f5) from json_t a order by f1, f2, f3
 PREHOOK: type: QUERY
 PREHOOK: Input: default@json_t
@@ -385,7 +383,6 @@ STAGE PLANS:
     Fetch Operator
      limit: -1
 
-
 PREHOOK: query: select a.key, b.f2, b.f5 from json_t a lateral view json_tuple(a.jstring, 'f1', 'f2', 'f3', 'f4', 'f5') b as f1, f2, f3, f4, f5 order by a.key
 PREHOOK: type: QUERY
 PREHOOK: Input: default@json_t
@@ -563,7 +560,6 @@ STAGE PLANS:
     Fetch Operator
       limit: -1
 
-
 PREHOOK: query: select f2, count(*) from json_t a lateral view json_tuple(a.jstring, 'f1', 'f2', 'f3', 'f4', 'f5') b as f1, f2, f3, f4, f5 where f1 is not null group by f2 order by f2
 PREHOOK: type: QUERY
 PREHOOK: Input: default@json_t
@@ -588,11 +584,11 @@ POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@dest1
 POSTHOOK: Lineage: json_t.jstring EXPRESSION []
 POSTHOOK: Lineage: json_t.key EXPRESSION []
-PREHOOK: query: INSERT OVERWRITE TABLE dest1 SELECT '{"a":"b\nc"}' FROM src LIMIT 1
+PREHOOK: query: INSERT OVERWRITE TABLE dest1 SELECT '{"a":"b\nc"}' FROM src tablesample (1 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@dest1
-POSTHOOK: query: INSERT OVERWRITE TABLE dest1 SELECT '{"a":"b\nc"}' FROM src LIMIT 1
+POSTHOOK: query: INSERT OVERWRITE TABLE dest1 SELECT '{"a":"b\nc"}' FROM src tablesample (1 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@dest1
diff --git ql/src/test/results/clientpositive/udtf_parse_url_tuple.q.out ql/src/test/results/clientpositive/udtf_parse_url_tuple.q.out
index 7fbd74d..a38b31b 100644
--- ql/src/test/results/clientpositive/udtf_parse_url_tuple.q.out
+++ ql/src/test/results/clientpositive/udtf_parse_url_tuple.q.out
@@ -5,34 +5,34 @@ POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@url_t
 PREHOOK: query: insert overwrite table url_t
 select * from (
-  select '1', 'http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1' from src limit 1
+  select '1', 'http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1' from src tablesample (1 rows)
 union all
-  select '2', 'https://www.socs.uts.edu.au:80/MosaicDocs-old/url-primer.html?k1=tps#chapter1' from src limit 1
+  select '2', 'https://www.socs.uts.edu.au:80/MosaicDocs-old/url-primer.html?k1=tps#chapter1' from src tablesample (1 rows)
 union all
-  select '3', 'ftp://sites.google.com/a/example.com/site/page' from src limit 1
+  select '3', 'ftp://sites.google.com/a/example.com/site/page' from src tablesample (1 rows)
 union all
-  select '4', cast(null as string) from src limit 1
+  select '4', cast(null as string) from src tablesample (1 rows)
 union all
-  select '5', 'htttp://' from src limit 1
+  select '5', 'htttp://' from src tablesample (1 rows)
 union all
-  select '6', '[invalid url string]' from src limit 1
+  select '6', '[invalid url string]' from src tablesample (1 rows)
 ) s
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@url_t
 POSTHOOK: query: insert overwrite table url_t
 select * from (
-  select '1', 'http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1' from src limit 1
+  select '1', 'http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1' from src tablesample (1 rows)
 union all
-  select '2', 'https://www.socs.uts.edu.au:80/MosaicDocs-old/url-primer.html?k1=tps#chapter1' from src limit 1
+  select '2', 'https://www.socs.uts.edu.au:80/MosaicDocs-old/url-primer.html?k1=tps#chapter1' from src tablesample (1 rows)
 union all
-  select '3', 'ftp://sites.google.com/a/example.com/site/page' from src limit 1
+  select '3', 'ftp://sites.google.com/a/example.com/site/page' from src tablesample (1 rows)
 union all
-  select '4', cast(null as string) from src limit 1
+  select '4', cast(null as string) from src tablesample (1 rows)
 union all
-  select '5', 'htttp://' from src limit 1
+  select '5', 'htttp://' from src tablesample (1 rows)
 union all
-  select '6', '[invalid url string]' from src limit 1
+  select '6', '[invalid url string]' from src tablesample (1 rows)
 ) s
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
@@ -231,7 +231,6 @@ STAGE PLANS:
     Fetch Operator
       limit: -1
 
-
 PREHOOK: query: select a.key, b.* from url_t a lateral view parse_url_tuple(a.fullurl, 'HOST', 'PATH', 'QUERY', 'REF', 'PROTOCOL', 'FILE', 'AUTHORITY', 'USERINFO', 'QUERY:k1') b as ho, pa, qu, re, pr, fi, au, us, qk1 order by a.key
 PREHOOK: type: QUERY
 PREHOOK: Input: default@url_t
@@ -338,7 +337,6 @@ STAGE PLANS:
     Fetch Operator
       limit: -1
 
-
 PREHOOK: query: select parse_url_tuple(a.fullurl, 'HOST', 'PATH', 'QUERY', 'REF', 'PROTOCOL', 'FILE', 'AUTHORITY', 'USERINFO', 'QUERY:k1') as (ho, pa, qu, re, pr, fi, au, us, qk1) from url_t a order by ho, pa, qu
 PREHOOK: type: QUERY
 PREHOOK: Input: default@url_t
@@ -510,7 +508,6 @@ STAGE PLANS:
     Fetch Operator
       limit: -1
 
-
 PREHOOK: query: select a.key, b.ho, b.qu, b.qk1, b.err1, b.err2, b.err3 from url_t a lateral view parse_url_tuple(a.fullurl, 'HOST', 'PATH', 'QUERY', 'REF', 'PROTOCOL', 'FILE', 'AUTHORITY', 'USERINFO', 'QUERY:k1', 'host', 'query', 'QUERY:nonExistCol') b as ho, pa, qu, re, pr, fi, au, us, qk1, err1, err2, err3 order by a.key
 PREHOOK: type: QUERY
 PREHOOK: Input: default@url_t
@@ -669,7 +666,6 @@ STAGE PLANS:
     Fetch Operator
       limit: -1
 
-
 PREHOOK: query: select ho, count(*) from url_t a lateral view parse_url_tuple(a.fullurl, 'HOST', 'PATH', 'QUERY', 'REF', 'PROTOCOL', 'FILE', 'AUTHORITY', 'USERINFO', 'QUERY:k1') b as ho, pa, qu, re, pr, fi, au, us, qk1 where qk1 is not null group by ho
 PREHOOK: type: QUERY
 PREHOOK: Input: default@url_t