diff --git a/build.properties b/build.properties
index 3009bba..5b745a9 100644
--- a/build.properties
+++ b/build.properties
@@ -137,6 +137,7 @@ ivy.changingPattern=.*SNAPSHOT
ivy.publish.pattern=[artifact]-[revision].[ext]
ivy.artifact.retrieve.pattern=[conf]/[artifact]-[revision](-[classifier]).[ext]
ivysettings.xml=${ivy.conf.dir}/ivysettings.xml
+ivy.settings.file=${ivy.conf.dir}/ivysettings.xml
ivyresolvelog=default
ivy.mvn.repo=http://repo2.maven.org/maven2
ivy_repo_url=${ivy.mvn.repo}/org/apache/ivy/ivy/${ivy.version}/ivy-${ivy.version}.jar
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index cf0c895..114ebf3 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -7993,7 +7993,7 @@ private ExprNodeDesc genSamplePredicate(TableSample ts,
}
private String getAliasId(String alias, QB qb) {
- return (qb.getId() == null ? alias : qb.getId() + ":" + alias);
+ return (qb.getId() == null ? alias : qb.getId() + ":" + alias).toLowerCase();
}
@SuppressWarnings("nls")
diff --git a/ql/src/test/queries/clientpositive/alter_varchar2.q b/ql/src/test/queries/clientpositive/alter_varchar2.q
index 5a481e7..b870108 100644
--- a/ql/src/test/queries/clientpositive/alter_varchar2.q
+++ b/ql/src/test/queries/clientpositive/alter_varchar2.q
@@ -7,7 +7,7 @@ create table alter_varchar2 (
) partitioned by (hr int);
insert overwrite table alter_varchar2 partition (hr=1)
- select value from src limit 1;
+ select value from src tablesample (1 rows);
select c1, length(c1) from alter_varchar2;
@@ -16,7 +16,9 @@ alter table alter_varchar2 change column c1 c1 varchar(10);
select hr, c1, length(c1) from alter_varchar2 where hr = 1;
insert overwrite table alter_varchar2 partition (hr=2)
- select key from src limit 1;
+ select key from src tablesample (1 rows);
+
+set hive.fetch.task.conversion=more;
select hr, c1, length(c1) from alter_varchar2 where hr = 1;
select hr, c1, length(c1) from alter_varchar2 where hr = 2;
diff --git a/ql/src/test/queries/clientpositive/auto_join_reordering_values.q b/ql/src/test/queries/clientpositive/auto_join_reordering_values.q
index 46a4a0d..f0d79a3 100644
--- a/ql/src/test/queries/clientpositive/auto_join_reordering_values.q
+++ b/ql/src/test/queries/clientpositive/auto_join_reordering_values.q
@@ -4,10 +4,10 @@ create table testsrc ( `key` int,`val` string);
load data local inpath '../data/files/kv1.txt' overwrite into table testsrc;
drop table if exists orderpayment_small;
create table orderpayment_small (`dealid` int,`date` string,`time` string, `cityid` int, `userid` int);
-insert overwrite table orderpayment_small select 748, '2011-03-24', '2011-03-24', 55 ,5372613 from testsrc limit 1;
+insert overwrite table orderpayment_small select 748, '2011-03-24', '2011-03-24', 55 ,5372613 from testsrc tablesample (1 rows);
drop table if exists user_small;
create table user_small( userid int);
-insert overwrite table user_small select key from testsrc limit 100;
+insert overwrite table user_small select key from testsrc tablesample (100 rows);
set hive.auto.convert.join.noconditionaltask.size = 200;
explain extended SELECT
diff --git a/ql/src/test/queries/clientpositive/binary_constant.q b/ql/src/test/queries/clientpositive/binary_constant.q
index e0a8b95..4f80dc3 100644
@@ -1 +1,3 @@
-select cast(cast('a' as binary) as string) from src limit 1;
+set hive.fetch.task.conversion=more;
+
+select cast(cast('a' as binary) as string) from src tablesample (1 rows);
diff --git a/ql/src/test/queries/clientpositive/cast_to_int.q b/ql/src/test/queries/clientpositive/cast_to_int.q
index 729ffdc..b1551f2 100644
--- a/ql/src/test/queries/clientpositive/cast_to_int.q
+++ b/ql/src/test/queries/clientpositive/cast_to_int.q
@@ -1,3 +1,5 @@
+set hive.fetch.task.conversion=more;
+
-- cast string floats to integer types
select cast('1' as float),
@@ -27,4 +29,4 @@ select cast('127' as tinyint),
cast('1.0a' as int),
cast('-1.-1' as int)
-from src limit 1;
+from src tablesample (1 rows);
diff --git a/ql/src/test/queries/clientpositive/compile_processor.q b/ql/src/test/queries/clientpositive/compile_processor.q
index 2aabb52..56e561c 100644
--- a/ql/src/test/queries/clientpositive/compile_processor.q
+++ b/ql/src/test/queries/clientpositive/compile_processor.q
@@ -7,5 +7,5 @@ public class Pyth extends UDF {
}
` AS GROOVY NAMED Pyth.groovy;
CREATE TEMPORARY FUNCTION Pyth as 'Pyth';
-SELECT Pyth(3,4) FROM src limit 1;
+SELECT Pyth(3,4) FROM src tablesample (1 rows);
diff --git a/ql/src/test/queries/clientpositive/constant_prop.q b/ql/src/test/queries/clientpositive/constant_prop.q
index ced72d6..d51b801 100644
--- a/ql/src/test/queries/clientpositive/constant_prop.q
+++ b/ql/src/test/queries/clientpositive/constant_prop.q
@@ -1,3 +1,5 @@
+set hive.fetch.task.conversion=more;
+
EXPLAIN
SELECT NAMED_STRUCT(
IF(ARRAY_CONTAINS(ARRAY(1, 2), 3), "F1", "B1"), 1,
@@ -7,7 +9,7 @@ SELECT NAMED_STRUCT(
IF(ARRAY_CONTAINS(ARRAY(1, 2), 3), "F1", "B1"), 1,
IF(ARRAY_CONTAINS(MAP_KEYS(MAP("b", "x")), "b"), "F2", "B2"), 2
).F2
- FROM src LIMIT 1;
+ FROM src tablesample (1 rows);
SELECT NAMED_STRUCT(
IF(ARRAY_CONTAINS(ARRAY(1, 2), 3), "F1", "B1"), 1,
@@ -17,4 +19,4 @@ SELECT NAMED_STRUCT(
IF(ARRAY_CONTAINS(ARRAY(1, 2), 3), "F1", "B1"), 1,
IF(ARRAY_CONTAINS(MAP_KEYS(MAP("b", "x")), "b"), "F2", "B2"), 2
).F2
- FROM src LIMIT 1;
+ FROM src tablesample (1 rows);
diff --git a/ql/src/test/queries/clientpositive/ctas.q b/ql/src/test/queries/clientpositive/ctas.q
index e595904..71af40e 100644
--- a/ql/src/test/queries/clientpositive/ctas.q
+++ b/ql/src/test/queries/clientpositive/ctas.q
@@ -56,7 +56,7 @@ set hive.exec.mode.local.auto=true;
create table nzhang_ctas5 row format delimited fields terminated by ',' lines terminated by '\012' stored as textfile as select key, value from src sort by key, value limit 10;
create table nzhang_ctas6 (key string, `to` string);
-insert overwrite table nzhang_ctas6 select key, value from src limit 10;
+insert overwrite table nzhang_ctas6 select key, value from src tablesample (10 rows);
create table nzhang_ctas7 as select key, `to` from nzhang_ctas6;
diff --git a/ql/src/test/queries/clientpositive/date_1.q b/ql/src/test/queries/clientpositive/date_1.q
index a2322fc..7d89ac9 100644
--- a/ql/src/test/queries/clientpositive/date_1.q
+++ b/ql/src/test/queries/clientpositive/date_1.q
@@ -1,21 +1,23 @@
+set hive.fetch.task.conversion=more;
+
drop table date_1;
create table date_1 (d date);
insert overwrite table date_1
- select cast('2011-01-01' as date) from src limit 1;
+ select cast('2011-01-01' as date) from src tablesample (1 rows);
select * from date_1 limit 1;
select d, count(d) from date_1 group by d;
insert overwrite table date_1
- select date '2011-01-01' from src limit 1;
+ select date '2011-01-01' from src tablesample (1 rows);
select * from date_1 limit 1;
select d, count(d) from date_1 group by d;
insert overwrite table date_1
- select cast(cast('2011-01-01 00:00:00' as timestamp) as date) from src limit 1;
+ select cast(cast('2011-01-01 00:00:00' as timestamp) as date) from src tablesample (1 rows);
select * from date_1 limit 1;
select d, count(d) from date_1 group by d;
diff --git a/ql/src/test/queries/clientpositive/date_3.q b/ql/src/test/queries/clientpositive/date_3.q
index be25148..383fb4e 100644
--- a/ql/src/test/queries/clientpositive/date_3.q
+++ b/ql/src/test/queries/clientpositive/date_3.q
@@ -7,7 +7,7 @@ create table date_3 (
alter table date_3 add columns (c2 date);
insert overwrite table date_3
- select 1, cast(cast('2011-01-01 00:00:00' as timestamp) as date) from src limit 1;
+ select 1, cast(cast('2011-01-01 00:00:00' as timestamp) as date) from src tablesample (1 rows);
select * from date_3;
diff --git a/ql/src/test/queries/clientpositive/date_4.q b/ql/src/test/queries/clientpositive/date_4.q
index 4801a79..c840089 100644
--- a/ql/src/test/queries/clientpositive/date_4.q
+++ b/ql/src/test/queries/clientpositive/date_4.q
@@ -1,3 +1,5 @@
+set hive.fetch.task.conversion=more;
+
drop table date_4;
create table date_4 (d date);
@@ -5,7 +7,7 @@ alter table date_4 set serde 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-- Test date literal syntax
insert overwrite table date_4
- select date '2011-01-01' from src limit 1;
+ select date '2011-01-01' from src tablesample (1 rows);
select d, date '2011-01-01' from date_4 limit 1;
drop table date_4;
diff --git a/ql/src/test/queries/clientpositive/date_comparison.q b/ql/src/test/queries/clientpositive/date_comparison.q
index bdcb6c1..86c7362 100644
--- a/ql/src/test/queries/clientpositive/date_comparison.q
+++ b/ql/src/test/queries/clientpositive/date_comparison.q
@@ -1,3 +1,5 @@
+set hive.fetch.task.conversion=more;
+
-- Comparisons against same value
select cast('2011-05-06' as date) >
cast('2011-05-06' as date) from src limit 1;
diff --git a/ql/src/test/queries/clientpositive/decimal_1.q b/ql/src/test/queries/clientpositive/decimal_1.q
index d865af4..f8b3661 100644
--- a/ql/src/test/queries/clientpositive/decimal_1.q
+++ b/ql/src/test/queries/clientpositive/decimal_1.q
@@ -1,18 +1,20 @@
+set hive.fetch.task.conversion=more;
+
drop table decimal_1;
create table decimal_1 (t decimal(4,2));
alter table decimal_1 set serde 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe';
insert overwrite table decimal_1
- select cast('17.29' as decimal(4,2)) from src limit 1;
-select cast(t as boolean) from decimal_1 limit 1;
-select cast(t as tinyint) from decimal_1 limit 1;
-select cast(t as smallint) from decimal_1 limit 1;
-select cast(t as int) from decimal_1 limit 1;
-select cast(t as bigint) from decimal_1 limit 1;
-select cast(t as float) from decimal_1 limit 1;
-select cast(t as double) from decimal_1 limit 1;
-select cast(t as string) from decimal_1 limit 1;
-select cast(t as timestamp) from decimal_1 limit 1;
+ select cast('17.29' as decimal(4,2)) from src tablesample (1 rows);
+select cast(t as boolean) from decimal_1;
+select cast(t as tinyint) from decimal_1;
+select cast(t as smallint) from decimal_1;
+select cast(t as int) from decimal_1;
+select cast(t as bigint) from decimal_1;
+select cast(t as float) from decimal_1;
+select cast(t as double) from decimal_1;
+select cast(t as string) from decimal_1;
+select cast(t as timestamp) from decimal_1;
drop table decimal_1;
diff --git a/ql/src/test/queries/clientpositive/decimal_2.q b/ql/src/test/queries/clientpositive/decimal_2.q
index 4cf36a2..2c4d919 100644
--- a/ql/src/test/queries/clientpositive/decimal_2.q
+++ b/ql/src/test/queries/clientpositive/decimal_2.q
@@ -1,40 +1,42 @@
+set hive.fetch.task.conversion=more;
+
drop table decimal_2;
create table decimal_2 (t decimal(18,9));
alter table decimal_2 set serde 'org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe';
insert overwrite table decimal_2
- select cast('17.29' as decimal(4,2)) from src limit 1;
+ select cast('17.29' as decimal(4,2)) from src tablesample (1 rows);
-select cast(t as boolean) from decimal_2 limit 1;
-select cast(t as tinyint) from decimal_2 limit 1;
-select cast(t as smallint) from decimal_2 limit 1;
-select cast(t as int) from decimal_2 limit 1;
-select cast(t as bigint) from decimal_2 limit 1;
-select cast(t as float) from decimal_2 limit 1;
-select cast(t as double) from decimal_2 limit 1;
-select cast(t as string) from decimal_2 limit 1;
+select cast(t as boolean) from decimal_2;
+select cast(t as tinyint) from decimal_2;
+select cast(t as smallint) from decimal_2;
+select cast(t as int) from decimal_2;
+select cast(t as bigint) from decimal_2;
+select cast(t as float) from decimal_2;
+select cast(t as double) from decimal_2;
+select cast(t as string) from decimal_2;
insert overwrite table decimal_2
- select cast('3404045.5044003' as decimal(18,9)) from src limit 1;
+ select cast('3404045.5044003' as decimal(18,9)) from src tablesample (1 rows);
-select cast(t as boolean) from decimal_2 limit 1;
-select cast(t as tinyint) from decimal_2 limit 1;
-select cast(t as smallint) from decimal_2 limit 1;
-select cast(t as int) from decimal_2 limit 1;
-select cast(t as bigint) from decimal_2 limit 1;
-select cast(t as float) from decimal_2 limit 1;
-select cast(t as double) from decimal_2 limit 1;
-select cast(t as string) from decimal_2 limit 1;
+select cast(t as boolean) from decimal_2;
+select cast(t as tinyint) from decimal_2;
+select cast(t as smallint) from decimal_2;
+select cast(t as int) from decimal_2;
+select cast(t as bigint) from decimal_2;
+select cast(t as float) from decimal_2;
+select cast(t as double) from decimal_2;
+select cast(t as string) from decimal_2;
-select cast(3.14 as decimal(4,2)) from decimal_2 limit 1;
-select cast(cast(3.14 as float) as decimal(4,2)) from decimal_2 limit 1;
-select cast(cast('2012-12-19 11:12:19.1234567' as timestamp) as decimal(30,8)) from decimal_2 limit 1;
-select cast(true as decimal) from decimal_2 limit 1;
-select cast(3Y as decimal) from decimal_2 limit 1;
-select cast(3S as decimal) from decimal_2 limit 1;
-select cast(cast(3 as int) as decimal) from decimal_2 limit 1;
-select cast(3L as decimal) from decimal_2 limit 1;
-select cast(0.99999999999999999999 as decimal(20,19)) from decimal_2 limit 1;
-select cast('0.99999999999999999999' as decimal(20,20)) from decimal_2 limit 1;
+select cast(3.14 as decimal(4,2)) from decimal_2;
+select cast(cast(3.14 as float) as decimal(4,2)) from decimal_2;
+select cast(cast('2012-12-19 11:12:19.1234567' as timestamp) as decimal(30,8)) from decimal_2;
+select cast(true as decimal) from decimal_2;
+select cast(3Y as decimal) from decimal_2;
+select cast(3S as decimal) from decimal_2;
+select cast(cast(3 as int) as decimal) from decimal_2;
+select cast(3L as decimal) from decimal_2;
+select cast(0.99999999999999999999 as decimal(20,19)) from decimal_2;
+select cast('0.99999999999999999999' as decimal(20,20)) from decimal_2;
drop table decimal_2;
diff --git a/ql/src/test/queries/clientpositive/decimal_udf.q b/ql/src/test/queries/clientpositive/decimal_udf.q
index 06d1785..a3e4ada 100644
--- a/ql/src/test/queries/clientpositive/decimal_udf.q
+++ b/ql/src/test/queries/clientpositive/decimal_udf.q
@@ -1,3 +1,5 @@
+set hive.fetch.task.conversion=more;
+
DROP TABLE IF EXISTS DECIMAL_UDF;
CREATE TABLE DECIMAL_UDF (key decimal(65,30), value int)
diff --git a/ql/src/test/queries/clientpositive/disallow_incompatible_type_change_off.q b/ql/src/test/queries/clientpositive/disallow_incompatible_type_change_off.q
index 2a1e727..991b930 100644
--- a/ql/src/test/queries/clientpositive/disallow_incompatible_type_change_off.q
+++ b/ql/src/test/queries/clientpositive/disallow_incompatible_type_change_off.q
@@ -1,7 +1,9 @@
+set hive.fetch.task.conversion=more;
+
SET hive.metastore.disallow.incompatible.col.type.changes=false;
SELECT * FROM src LIMIT 1;
CREATE TABLE test_table123 (a INT, b MAP<STRING, STRING>) PARTITIONED BY (ds STRING) STORED AS SEQUENCEFILE;
-INSERT OVERWRITE TABLE test_table123 PARTITION(ds="foo1") SELECT 1, MAP("a1", "b1") FROM src LIMIT 1;
+INSERT OVERWRITE TABLE test_table123 PARTITION(ds="foo1") SELECT 1, MAP("a1", "b1") FROM src tablesample (1 rows);
SELECT * from test_table123 WHERE ds="foo1";
-- This should now work as hive.metastore.disallow.incompatible.col.type.changes is false
ALTER TABLE test_table123 REPLACE COLUMNS (a INT, b STRING);
diff --git a/ql/src/test/queries/clientpositive/filter_join_breaktask2.q b/ql/src/test/queries/clientpositive/filter_join_breaktask2.q
index f8d855b..7f4258f 100644
--- a/ql/src/test/queries/clientpositive/filter_join_breaktask2.q
+++ b/ql/src/test/queries/clientpositive/filter_join_breaktask2.q
@@ -12,11 +12,11 @@ create table T3 (c0 bigint, c1 bigint, c2 int) partitioned by (ds string);
create table T4 (c0 bigint, c1 string, c2 string, c3 string, c4 string, c5 string, c6 string, c7 string, c8 string, c9 string, c10 string, c11 string, c12 string, c13 string, c14 string, c15 string, c16 string, c17 string, c18 string, c19 string, c20 string, c21 string, c22 string, c23 string, c24 string, c25 string, c26 string, c27 string, c28 string, c29 string, c30 string, c31 string, c32 string, c33 string, c34 string, c35 string, c36 string, c37 string, c38 string, c39 string, c40 string, c41 string, c42 string, c43 string, c44 string, c45 string, c46 string, c47 string, c48 string, c49 string, c50 string, c51 string, c52 string, c53 string, c54 string, c55 string, c56 string, c57 string, c58 string, c59 string, c60 string, c61 string, c62 string, c63 string, c64 string, c65 string, c66 string, c67 bigint, c68 string, c69 string, c70 bigint, c71 bigint, c72 bigint, c73 string, c74 string, c75 string, c76 string, c77 string, c78 string, c79 string, c80 string, c81 bigint, c82 bigint, c83 bigint) partitioned by (ds string);
-insert overwrite table T1 partition (ds='2010-04-17') select '5', '1', '1', '1', 0, 0,4 from src limit 1;
+insert overwrite table T1 partition (ds='2010-04-17') select '5', '1', '1', '1', 0, 0,4 from src tablesample (1 rows);
-insert overwrite table T2 partition(ds='2010-04-17') select '5','name', NULL, '2', 'kavin',NULL, '9', 'c', '8', '0', '0', '7', '1','2', '0', '3','2', NULL, '1', NULL, '3','2','0','0','5','10' from src limit 1;
+insert overwrite table T2 partition(ds='2010-04-17') select '5','name', NULL, '2', 'kavin',NULL, '9', 'c', '8', '0', '0', '7', '1','2', '0', '3','2', NULL, '1', NULL, '3','2','0','0','5','10' from src tablesample (1 rows);
-insert overwrite table T3 partition (ds='2010-04-17') select 4,5,0 from src limit 1;
+insert overwrite table T3 partition (ds='2010-04-17') select 4,5,0 from src tablesample (1 rows);
insert overwrite table T4 partition(ds='2010-04-17') select 4,'1','1','8','4','5','1','0','9','U','2','2', '0','2','1','1','J','C','A','U', '2','s', '2',NULL, NULL, NULL,NULL, NULL, NULL,'1','j', 'S', '6',NULL,'1', '2', 'J', 'g', '1', 'e', '2', '1', '2', 'U', 'P', 'p', '3', '0', '0', '0', '1', '1', '1', '0', '0', '0', '6', '2', 'j',NULL, NULL, NULL,NULL,NULL, NULL, '5',NULL, 'j', 'j', 2, 2, 1, '2', '2', '1', '1', '1', '1', '1', '1', 1, 1, 32,NULL from src limit 1;
diff --git a/ql/src/test/queries/clientpositive/lateral_view_noalias.q b/ql/src/test/queries/clientpositive/lateral_view_noalias.q
index 11e54a5..df73432 100644
--- a/ql/src/test/queries/clientpositive/lateral_view_noalias.q
+++ b/ql/src/test/queries/clientpositive/lateral_view_noalias.q
@@ -1,3 +1,5 @@
+set hive.fetch.task.conversion=more;
+
--HIVE-2608 Do not require AS a,b,c part in LATERAL VIEW
EXPLAIN SELECT myTab.* from src LATERAL VIEW explode(map('key1', 100, 'key2', 200)) myTab limit 2;
SELECT myTab.* from src LATERAL VIEW explode(map('key1', 100, 'key2', 200)) myTab limit 2;
diff --git a/ql/src/test/queries/clientpositive/literal_decimal.q b/ql/src/test/queries/clientpositive/literal_decimal.q
index a6ad4b8..08b21dc 100644
--- a/ql/src/test/queries/clientpositive/literal_decimal.q
+++ b/ql/src/test/queries/clientpositive/literal_decimal.q
@@ -1,3 +1,5 @@
+set hive.fetch.task.conversion=more;
+
EXPLAIN SELECT -1BD, 0BD, 1BD, 3.14BD, -3.14BD, 99999999999999999BD, 99999999999999999.9999999999999BD, 1E-99BD, 1E99BD FROM src LIMIT 1;
SELECT -1BD, 0BD, 1BD, 3.14BD, -3.14BD, 99999999999999999BD, 99999999999999999.9999999999999BD, 1E-99BD, 1E99BD FROM src LIMIT 1;
diff --git a/ql/src/test/queries/clientpositive/literal_double.q b/ql/src/test/queries/clientpositive/literal_double.q
index 0883612..766da69 100644
--- a/ql/src/test/queries/clientpositive/literal_double.q
+++ b/ql/src/test/queries/clientpositive/literal_double.q
@@ -1,3 +1,5 @@
+set hive.fetch.task.conversion=more;
+
EXPLAIN SELECT 3.14, -3.14, 3.14e8, 3.14e-8, -3.14e8, -3.14e-8, 3.14e+8, 3.14E8, 3.14E-8 FROM src LIMIT 1;
SELECT 3.14, -3.14, 3.14e8, 3.14e-8, -3.14e8, -3.14e-8, 3.14e+8, 3.14E8, 3.14E-8 FROM src LIMIT 1;
diff --git a/ql/src/test/queries/clientpositive/literal_ints.q b/ql/src/test/queries/clientpositive/literal_ints.q
index 9da622e..5fd0cfa 100644
--- a/ql/src/test/queries/clientpositive/literal_ints.q
+++ b/ql/src/test/queries/clientpositive/literal_ints.q
@@ -1,3 +1,5 @@
+set hive.fetch.task.conversion=more;
+
EXPLAIN SELECT 100, 100Y, 100S, 100L FROM src LIMIT 1;
SELECT 100, 100Y, 100S, 100L FROM src LIMIT 1;
diff --git a/ql/src/test/queries/clientpositive/literal_string.q b/ql/src/test/queries/clientpositive/literal_string.q
index 21f0890..c57dc57 100644
--- a/ql/src/test/queries/clientpositive/literal_string.q
+++ b/ql/src/test/queries/clientpositive/literal_string.q
@@ -1,3 +1,5 @@
+set hive.fetch.task.conversion=more;
+
EXPLAIN
SELECT 'face''book', 'face' 'book', 'face' 'book', "face""book", "face" "book", "face"
diff --git a/ql/src/test/queries/clientpositive/macro.q b/ql/src/test/queries/clientpositive/macro.q
index fd0f7f2..47b05ff 100644
--- a/ql/src/test/queries/clientpositive/macro.q
+++ b/ql/src/test/queries/clientpositive/macro.q
@@ -1,3 +1,5 @@
+set hive.fetch.task.conversion=more;
+
CREATE TEMPORARY MACRO SIGMOID (x DOUBLE) 1.0 / (1.0 + EXP(-x));
SELECT SIGMOID(2) FROM src LIMIT 1;
EXPLAIN SELECT SIGMOID(2) FROM src LIMIT 1;
diff --git a/ql/src/test/queries/clientpositive/null_cast.q b/ql/src/test/queries/clientpositive/null_cast.q
index 48c39b8..bd0cb8d 100644
--- a/ql/src/test/queries/clientpositive/null_cast.q
+++ b/ql/src/test/queries/clientpositive/null_cast.q
@@ -2,10 +2,10 @@ EXPLAIN SELECT ARRAY(NULL, 0),
ARRAY(NULL, ARRAY()),
ARRAY(NULL, MAP()),
ARRAY(NULL, STRUCT(0))
- FROM src LIMIT 1;
+ FROM src tablesample (1 rows);
SELECT ARRAY(NULL, 0),
ARRAY(NULL, ARRAY()),
ARRAY(NULL, MAP()),
ARRAY(NULL, STRUCT(0))
-FROM src LIMIT 1;
+FROM src tablesample (1 rows);
diff --git a/ql/src/test/queries/clientpositive/num_op_type_conv.q b/ql/src/test/queries/clientpositive/num_op_type_conv.q
index 7f858d3..d51c210 100644
--- a/ql/src/test/queries/clientpositive/num_op_type_conv.q
+++ b/ql/src/test/queries/clientpositive/num_op_type_conv.q
@@ -1,3 +1,5 @@
+set hive.fetch.task.conversion=more;
+
EXPLAIN SELECT null + 7, 1.0 - null, null + null,
CAST(21 AS BIGINT) % CAST(5 AS TINYINT),
CAST(21 AS BIGINT) % CAST(21 AS BIGINT),
diff --git a/ql/src/test/queries/clientpositive/ops_comparison.q b/ql/src/test/queries/clientpositive/ops_comparison.q
index b685ae6..ec9e807 100644
--- a/ql/src/test/queries/clientpositive/ops_comparison.q
+++ b/ql/src/test/queries/clientpositive/ops_comparison.q
@@ -1,3 +1,4 @@
+set hive.fetch.task.conversion=more;
select 1.0 < 2.0 from src limit 1;
select 2.0 < 2.0 from src limit 1;
diff --git a/ql/src/test/queries/clientpositive/orc_diff_part_cols.q b/ql/src/test/queries/clientpositive/orc_diff_part_cols.q
index cbfd7b3..0c8861e 100644
--- a/ql/src/test/queries/clientpositive/orc_diff_part_cols.q
+++ b/ql/src/test/queries/clientpositive/orc_diff_part_cols.q
@@ -10,7 +10,7 @@ set hive.input.format=org.apache.hadoop.hive.ql.io.CombineHiveInputFormat;
-- to another partition
-- This can produce unexpected results with CombineHiveInputFormat
-INSERT OVERWRITE TABLE test_orc PARTITION (part = '1') SELECT key FROM src LIMIT 5;
+INSERT OVERWRITE TABLE test_orc PARTITION (part = '1') SELECT key FROM src tablesample (5 rows);
ALTER TABLE test_orc ADD COLUMNS (cnt INT);
diff --git a/ql/src/test/queries/clientpositive/orc_empty_strings.q b/ql/src/test/queries/clientpositive/orc_empty_strings.q
index 0ef57d1..34cd6d4 100644
--- a/ql/src/test/queries/clientpositive/orc_empty_strings.q
+++ b/ql/src/test/queries/clientpositive/orc_empty_strings.q
@@ -3,13 +3,13 @@ ROW FORMAT SERDE 'org.apache.hadoop.hive.ql.io.orc.OrcSerde'
STORED AS INPUTFORMAT 'org.apache.hadoop.hive.ql.io.orc.OrcInputFormat' OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat';
-INSERT OVERWRITE TABLE test_orc SELECT '' FROM src limit 10;
+INSERT OVERWRITE TABLE test_orc SELECT '' FROM src tablesample (10 rows);
-- Test reading a column which is just empty strings
SELECT * FROM test_orc;
-INSERT OVERWRITE TABLE test_orc SELECT IF (key % 3 = 0, key, '') FROM src limit 10;
+INSERT OVERWRITE TABLE test_orc SELECT IF (key % 3 = 0, key, '') FROM src tablesample (10 rows);
-- Test reading a column which has some empty strings
diff --git a/ql/src/test/queries/clientpositive/partcols1.q b/ql/src/test/queries/clientpositive/partcols1.q
index b7f8c64..03a5760 100644
--- a/ql/src/test/queries/clientpositive/partcols1.q
+++ b/ql/src/test/queries/clientpositive/partcols1.q
@@ -1,7 +1,7 @@ create table test1(col1 string) partitioned by (partitionId int);
insert overwrite table test1 partition (partitionId=1)
- select key from src limit 10;
+ select key from src tablesample (10 rows);
FROM (
FROM test1
diff --git a/ql/src/test/queries/clientpositive/partition_date.q b/ql/src/test/queries/clientpositive/partition_date.q
index 8738afd..3c031db 100644
--- a/ql/src/test/queries/clientpositive/partition_date.q
+++ b/ql/src/test/queries/clientpositive/partition_date.q
@@ -2,14 +2,14 @@ drop table partition_date_1;
create table partition_date_1 (key string, value string) partitioned by (dt date, region int);
-insert overwrite table partition_date_1 partition(dt='2000-01-01', region=1)
- select * from src limit 10;
-insert overwrite table partition_date_1 partition(dt='2000-01-01', region=2)
- select * from src limit 5;
+insert overwrite table partition_date_1 partition(dt='2000-01-01', region=1)
+ select * from src tablesample (10 rows);
+insert overwrite table partition_date_1 partition(dt='2000-01-01', region=2)
+ select * from src tablesample (5 rows);
insert overwrite table partition_date_1 partition(dt='2013-08-08', region=1)
- select * from src limit 20;
+ select * from src tablesample (20 rows);
insert overwrite table partition_date_1 partition(dt='2013-08-08', region=10)
- select * from src limit 11;
+ select * from src tablesample (11 rows);
select distinct dt from partition_date_1;
select * from partition_date_1 where dt = '2000-01-01' and region = 2 order by key,value;
diff --git a/ql/src/test/queries/clientpositive/partition_date2.q b/ql/src/test/queries/clientpositive/partition_date2.q
index 9b84b59..c932ed1 100644
--- a/ql/src/test/queries/clientpositive/partition_date2.q
+++ b/ql/src/test/queries/clientpositive/partition_date2.q
@@ -3,7 +3,7 @@ drop table partition_date2_1;
create table partition_date2_1 (key string, value string) partitioned by (dt date, region int);
-- test date literal syntax
-from (select * from src limit 1) x
+from (select * from src tablesample (1 rows)) x
insert overwrite table partition_date2_1 partition(dt=date '2000-01-01', region=1) select *
insert overwrite table partition_date2_1 partition(dt=date '2000-01-01', region=2) select *
insert overwrite table partition_date2_1 partition(dt=date '1999-01-01', region=2) select *;
@@ -13,7 +13,7 @@ select * from partition_date2_1;
-- insert overwrite
insert overwrite table partition_date2_1 partition(dt=date '2000-01-01', region=2)
- select 'changed_key', 'changed_value' from src limit 2;
+ select 'changed_key', 'changed_value' from src tablesample (2 rows);
select * from partition_date2_1;
-- truncate
@@ -41,7 +41,7 @@ alter table partition_date2_1 partition(dt=date '1980-01-02', region=3)
describe extended partition_date2_1 partition(dt=date '1980-01-02', region=3);
insert overwrite table partition_date2_1 partition(dt=date '1980-01-02', region=3)
- select * from src limit 2;
+ select * from src tablesample (2 rows);
select * from partition_date2_1 order by key,value,dt,region;
-- alter table set location
diff --git a/ql/src/test/queries/clientpositive/partition_decode_name.q b/ql/src/test/queries/clientpositive/partition_decode_name.q
index ba193cd..a8381a4 100644
--- a/ql/src/test/queries/clientpositive/partition_decode_name.q
+++ b/ql/src/test/queries/clientpositive/partition_decode_name.q
@@ -1,9 +1,9 @@ create table sc as select *
-from (select '2011-01-11', '2011-01-11+14:18:26' from src limit 1
+from (select '2011-01-11', '2011-01-11+14:18:26' from src tablesample (1 rows)
union all
- select '2011-01-11', '2011-01-11+15:18:26' from src limit 1
+ select '2011-01-11', '2011-01-11+15:18:26' from src tablesample (1 rows)
union all
- select '2011-01-11', '2011-01-11+16:18:26' from src limit 1 ) s;
+ select '2011-01-11', '2011-01-11+16:18:26' from src tablesample (1 rows) ) s;
create table sc_part (key string) partitioned by (ts string) stored as rcfile;
diff --git a/ql/src/test/queries/clientpositive/partition_special_char.q b/ql/src/test/queries/clientpositive/partition_special_char.q
index 8134433..b0b1ff4 100644
--- a/ql/src/test/queries/clientpositive/partition_special_char.q
+++ b/ql/src/test/queries/clientpositive/partition_special_char.q
@@ -1,9 +1,9 @@ create table sc as select *
-from (select '2011-01-11', '2011-01-11+14:18:26' from src limit 1
+from (select '2011-01-11', '2011-01-11+14:18:26' from src tablesample (1 rows)
union all
- select '2011-01-11', '2011-01-11+15:18:26' from src limit 1
+ select '2011-01-11', '2011-01-11+15:18:26' from src tablesample (1 rows)
union all
- select '2011-01-11', '2011-01-11+16:18:26' from src limit 1 ) s;
+ select '2011-01-11', '2011-01-11+16:18:26' from src tablesample (1 rows) ) s;
create table sc_part (key string) partitioned by (ts string) stored as rcfile;
diff --git a/ql/src/test/queries/clientpositive/partition_varchar1.q b/ql/src/test/queries/clientpositive/partition_varchar1.q
index d700b1c..22aadd3 100644
--- a/ql/src/test/queries/clientpositive/partition_varchar1.q
+++ b/ql/src/test/queries/clientpositive/partition_varchar1.q
@@ -3,13 +3,13 @@ drop table partition_varchar_1;
create table partition_varchar_1 (key string, value varchar(20)) partitioned by (dt varchar(10), region int);
insert overwrite table partition_varchar_1 partition(dt='2000-01-01', region=1)
- select * from src limit 10;
+ select * from src tablesample (10 rows);
insert overwrite table partition_varchar_1 partition(dt='2000-01-01', region=2)
- select * from src limit 5;
+ select * from src tablesample (5 rows);
insert overwrite table partition_varchar_1 partition(dt='2013-08-08', region=1)
- select * from src limit 20;
+ select * from src tablesample (20 rows);
insert overwrite table partition_varchar_1 partition(dt='2013-08-08', region=10)
- select * from src limit 11;
+ select * from src tablesample (11 rows);
select distinct dt from partition_varchar_1;
select * from partition_varchar_1 where dt = '2000-01-01' and region = 2 order by key,value;
diff --git a/ql/src/test/queries/clientpositive/ppd_union_view.q b/ql/src/test/queries/clientpositive/ppd_union_view.q
index d635e2d..a7606c5 100644
--- a/ql/src/test/queries/clientpositive/ppd_union_view.q
+++ b/ql/src/test/queries/clientpositive/ppd_union_view.q
@@ -5,26 +5,26 @@ drop view v;
create table t1_new (key string, value string) partitioned by (ds string);
insert overwrite table t1_new partition (ds = '2011-10-15')
-select 'key1', 'value1' from src limit 1;
+select 'key1', 'value1' from src tablesample (1 rows);
insert overwrite table t1_new partition (ds = '2011-10-16')
-select 'key2', 'value2' from src limit 1;
+select 'key2', 'value2' from src tablesample (1 rows);
create table t1_old (keymap string, value string) partitioned by (ds string);
insert overwrite table t1_old partition (ds = '2011-10-13')
-select 'keymap3', 'value3' from src limit 1;
+select 'keymap3', 'value3' from src tablesample (1 rows);
insert overwrite table t1_old partition (ds = '2011-10-14')
-select 'keymap4', 'value4' from src limit 1;
+select 'keymap4', 'value4' from src tablesample (1 rows);
create table t1_mapping (key string, keymap string) partitioned by (ds string);
insert overwrite table t1_mapping partition (ds = '2011-10-13')
-select 'key3', 'keymap3' from src limit 1;
+select 'key3', 'keymap3' from src tablesample (1 rows);
insert overwrite table t1_mapping partition (ds = '2011-10-14')
-select 'key4', 'keymap4' from src limit 1;
+select 'key4', 'keymap4' from src tablesample (1 rows);
create view t1 partitioned on (ds)
as
diff --git a/ql/src/test/queries/clientpositive/ppr_pushdown.q b/ql/src/test/queries/clientpositive/ppr_pushdown.q
index 860dd63..440005f 100644
--- a/ql/src/test/queries/clientpositive/ppr_pushdown.q
+++ b/ql/src/test/queries/clientpositive/ppr_pushdown.q
@@ -1,3 +1,5 @@
+set hive.fetch.task.conversion=more;
+
create table ppr_test (key string) partitioned by (ds string);
alter table ppr_test add partition (ds = '1234');
@@ -9,14 +11,14 @@ alter table ppr_test add partition (ds = '12:4');
alter table ppr_test add partition (ds = '12%4');
alter table ppr_test add partition (ds = '12*4');
-insert overwrite table ppr_test partition(ds = '1234') select * from (select '1234' from src limit 1 union all select 'abcd' from src limit 1) s;
-insert overwrite table ppr_test partition(ds = '1224') select * from (select '1224' from src limit 1 union all select 'abcd' from src limit 1) s;
-insert overwrite table ppr_test partition(ds = '1214') select * from (select '1214' from src limit 1 union all select 'abcd' from src limit 1) s;
-insert overwrite table ppr_test partition(ds = '12+4') select * from (select '12+4' from src limit 1 union all select 'abcd' from src limit 1) s;
-insert overwrite table ppr_test partition(ds = '12.4') select * from (select '12.4' from src limit 1 union all select 'abcd' from src limit 1) s;
-insert overwrite table ppr_test partition(ds = '12:4') select * from (select '12:4' from src limit 1 union all select 'abcd' from src limit 1) s;
-insert overwrite table ppr_test partition(ds = '12%4') select * from (select '12%4' from src limit 1 union all select 'abcd' from src limit 1) s;
-insert overwrite table ppr_test partition(ds = '12*4') select * from (select '12*4' from src limit 1 union all select 'abcd' from src limit 1) s;
+insert overwrite table ppr_test partition(ds = '1234') select * from (select '1234' from src tablesample (1 rows) union all select 'abcd' from src tablesample (1 rows)) s;
+insert overwrite table ppr_test partition(ds = '1224') select * from (select '1224' from src tablesample (1 rows) union all select 'abcd' from src tablesample (1 rows)) s;
+insert overwrite table ppr_test partition(ds = '1214') select * from (select '1214' from src tablesample (1 rows) union all select 'abcd' from src tablesample (1 rows)) s;
+insert overwrite table ppr_test partition(ds = '12+4') select * from (select '12+4' from src tablesample (1 rows) union all select 'abcd' from src tablesample (1 rows)) s;
+insert overwrite table ppr_test partition(ds = '12.4') select * from (select '12.4' from src tablesample (1 rows) union all select 'abcd' from src tablesample (1 rows)) s;
+insert overwrite table ppr_test partition(ds = '12:4') select * from (select '12:4' from src tablesample (1 rows) union all select 'abcd' from src tablesample (1 rows)) s;
+insert overwrite table ppr_test partition(ds = '12%4') select * from (select '12%4' from src tablesample (1 rows) union all select 'abcd' from src tablesample (1 rows)) s;
+insert overwrite table ppr_test partition(ds = '12*4') select * from (select '12*4' from src tablesample (1 rows) union all select 'abcd' from src tablesample (1 rows)) s;
select * from ppr_test where ds = '1234' order by key;
diff --git a/ql/src/test/queries/clientpositive/ppr_pushdown2.q b/ql/src/test/queries/clientpositive/ppr_pushdown2.q
index 67c0da0..8c60906 100644
--- a/ql/src/test/queries/clientpositive/ppr_pushdown2.q
+++ b/ql/src/test/queries/clientpositive/ppr_pushdown2.q
@@ -1,24 +1,26 @@
+set hive.fetch.task.conversion=more;
+
create table ppr_test (key string) partitioned by (ds string);
-insert overwrite table ppr_test partition(ds='2') select '2' from src limit 1;
-insert overwrite table ppr_test partition(ds='22') select '22' from src limit 1;
+insert overwrite table ppr_test partition(ds='2') select '2' from src tablesample (1 rows);
+insert overwrite table ppr_test partition(ds='22') select '22' from src tablesample (1 rows);
select * from ppr_test where ds = '2';
select * from ppr_test where ds = '22';
create table ppr_test2 (key string) partitioned by (ds string, s string);
-insert overwrite table ppr_test2 partition(ds='1', s='2') select '1' from src limit 1;
-insert overwrite table ppr_test2 partition(ds='2', s='1') select '2' from src limit 1;
+insert overwrite table ppr_test2 partition(ds='1', s='2') select '1' from src tablesample (1 rows);
+insert overwrite table ppr_test2 partition(ds='2', s='1') select '2' from src tablesample (1 rows);
select * from ppr_test2 where s = '1';
select * from ppr_test2 where ds = '1';
create table ppr_test3 (key string) partitioned by (col string, ol string, l string);
-insert overwrite table ppr_test3 partition(col='1', ol='2', l = '3') select '1' from src limit 1;
-insert overwrite table ppr_test3 partition(col='1', ol='1', l = '2') select '2' from src limit 1;
-insert overwrite table ppr_test3 partition(col='1', ol='2', l = '1') select '3' from src limit 1;
+insert overwrite table ppr_test3 partition(col='1', ol='2', l = '3') select '1' from src tablesample (1 rows);
+insert overwrite table ppr_test3 partition(col='1', ol='1', l = '2') select '2' from src tablesample (1 rows);
+insert overwrite table ppr_test3 partition(col='1', ol='2', l = '1') select '3' from src tablesample (1 rows);
select * from ppr_test3 where l = '1';
select * from ppr_test3 where l = '2';
diff --git a/ql/src/test/queries/clientpositive/quote2.q b/ql/src/test/queries/clientpositive/quote2.q
index 65b9f87..c93902a 100644
--- a/ql/src/test/queries/clientpositive/quote2.q
+++ b/ql/src/test/queries/clientpositive/quote2.q
@@ -1,3 +1,5 @@
+set hive.fetch.task.conversion=more;
+
EXPLAIN
SELECT
'abc', "abc",
diff --git a/ql/src/test/queries/clientpositive/str_to_map.q b/ql/src/test/queries/clientpositive/str_to_map.q
index c3b206b..ae83407 100644
--- a/ql/src/test/queries/clientpositive/str_to_map.q
+++ b/ql/src/test/queries/clientpositive/str_to_map.q
@@ -1,3 +1,5 @@
+set hive.fetch.task.conversion=more;
+
desc function str_to_map;
desc function extended str_to_map;
@@ -19,7 +21,7 @@ limit 3;
drop table tbl_s2m;
-create table tbl_s2m as select 'ABC=CC_333=444' as t from src limit 3;
+create table tbl_s2m as select 'ABC=CC_333=444' as t from src tablesample (3 rows);
select str_to_map(t,'_','=')['333'] from tbl_s2m;
diff --git a/ql/src/test/queries/clientpositive/test_boolean_whereclause.q b/ql/src/test/queries/clientpositive/test_boolean_whereclause.q
index d2da5ac..a4f0fdb 100644
--- a/ql/src/test/queries/clientpositive/test_boolean_whereclause.q
+++ b/ql/src/test/queries/clientpositive/test_boolean_whereclause.q
@@ -1,5 +1,5 @@ create table if not exists test_boolean(dummy tinyint);
-insert overwrite table test_boolean select 1 from src limit 1;
+insert overwrite table test_boolean select 1 from src tablesample (1 rows);
SELECT 1
FROM (
diff --git a/ql/src/test/queries/clientpositive/timestamp_1.q b/ql/src/test/queries/clientpositive/timestamp_1.q
index f2c3b59..ce79eef 100644
--- a/ql/src/test/queries/clientpositive/timestamp_1.q
+++ b/ql/src/test/queries/clientpositive/timestamp_1.q
@@ -1,10 +1,12 @@
+set hive.fetch.task.conversion=more;
+
drop table timestamp_1;
create table timestamp_1 (t timestamp);
alter table timestamp_1 set serde 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe';
-insert overwrite table timestamp_1
- select cast('2011-01-01 01:01:01' as timestamp) from src limit 1;
+insert overwrite table timestamp_1
+ select cast('2011-01-01 01:01:01' as timestamp) from src tablesample (1 rows);
select cast(t as boolean) from timestamp_1 limit 1;
select cast(t as tinyint) from timestamp_1 limit 1;
select cast(t as smallint) from timestamp_1 limit 1;
@@ -15,7 +17,7 @@ select cast(t as double) from timestamp_1 limit 1;
select cast(t as string) from timestamp_1 limit 1;
insert overwrite table timestamp_1
- select '2011-01-01 01:01:01' from src limit 1;
+ select '2011-01-01 01:01:01' from src tablesample (1 rows);
select cast(t as boolean) from timestamp_1 limit 1;
select cast(t as tinyint) from timestamp_1 limit 1;
select cast(t as smallint) from timestamp_1 limit 1;
@@ -26,7 +28,7 @@ select cast(t as double) from timestamp_1 limit 1;
select cast(t as string) from timestamp_1 limit 1;
insert overwrite table timestamp_1
- select '2011-01-01 01:01:01.1' from src limit 1;
+ select '2011-01-01 01:01:01.1' from src tablesample (1 rows);
select cast(t as boolean) from timestamp_1 limit 1;
select cast(t as tinyint) from timestamp_1 limit 1;
select cast(t as smallint) from timestamp_1 limit 1;
@@ -37,7 +39,7 @@ select cast(t as double) from timestamp_1 limit 1;
select cast(t as string) from timestamp_1 limit 1;
insert overwrite table timestamp_1
- select '2011-01-01 01:01:01.0001' from src limit 1;
+ select '2011-01-01 01:01:01.0001' from src tablesample (1 rows);
select cast(t as boolean) from timestamp_1 limit 1;
select cast(t as tinyint) from timestamp_1 limit 1;
select cast(t as smallint) from timestamp_1 limit 1;
@@ -48,7 +50,7 @@ select cast(t as double) from timestamp_1 limit 1;
select cast(t as string) from timestamp_1 limit 1;
insert overwrite table timestamp_1
- select '2011-01-01 01:01:01.000100000' from src limit 1;
+ select '2011-01-01 01:01:01.000100000' from src tablesample (1 rows);
select cast(t as boolean) from timestamp_1 limit 1;
select cast(t as tinyint) from timestamp_1 limit 1;
select cast(t as smallint) from timestamp_1 limit 1;
@@ -59,7 +61,7 @@ select cast(t as double) from timestamp_1 limit 1;
select cast(t as string) from timestamp_1 limit 1;
insert overwrite table timestamp_1
- select '2011-01-01 01:01:01.001000011' from src limit 1;
+ select '2011-01-01 01:01:01.001000011' from src tablesample (1 rows);
select cast(t as boolean) from timestamp_1 limit 1;
select cast(t as tinyint) from timestamp_1 limit 1;
select cast(t as smallint) from timestamp_1 limit 1;
diff --git a/ql/src/test/queries/clientpositive/timestamp_2.q b/ql/src/test/queries/clientpositive/timestamp_2.q
index b93208f..351f5ca 100644
--- a/ql/src/test/queries/clientpositive/timestamp_2.q
+++ b/ql/src/test/queries/clientpositive/timestamp_2.q
@@ -1,10 +1,12 @@
+set hive.fetch.task.conversion=more;
+
drop table timestamp_2;
create table timestamp_2 (t timestamp);
alter table timestamp_2 set serde 'org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe';
-insert overwrite table timestamp_2
- select cast('2011-01-01 01:01:01' as timestamp) from src limit 1;
+insert overwrite table timestamp_2
+ select cast('2011-01-01 01:01:01' as timestamp) from src tablesample (1 rows);
select cast(t as boolean) from timestamp_2 limit 1;
select cast(t as tinyint) from timestamp_2 limit 1;
select cast(t as smallint) from timestamp_2 limit 1;
@@ -15,7 +17,7 @@ select cast(t as double) from timestamp_2 limit 1;
select cast(t as string) from timestamp_2 limit 1;
insert overwrite table timestamp_2
- select '2011-01-01 01:01:01' from src limit 1;
+ select '2011-01-01 01:01:01' from src tablesample (1 rows);
select cast(t as boolean) from timestamp_2 limit 1;
select cast(t as tinyint) from timestamp_2 limit 1;
select cast(t as smallint) from timestamp_2 limit 1;
@@ -26,7 +28,7 @@ select cast(t as double) from timestamp_2 limit 1;
select cast(t as string) from timestamp_2 limit 1;
insert overwrite table timestamp_2
- select '2011-01-01 01:01:01.1' from src limit 1;
+ select '2011-01-01 01:01:01.1' from src tablesample (1 rows);
select cast(t as boolean) from timestamp_2 limit 1;
select cast(t as tinyint) from timestamp_2 limit 1;
select cast(t as smallint) from timestamp_2 limit 1;
@@ -37,7 +39,7 @@ select cast(t as double) from timestamp_2 limit 1;
select cast(t as string) from timestamp_2 limit 1;
insert overwrite table timestamp_2
- select '2011-01-01 01:01:01.0001' from src limit 1;
+ select '2011-01-01 01:01:01.0001' from src tablesample (1 rows);
select cast(t as boolean) from timestamp_2 limit 1;
select cast(t as tinyint) from timestamp_2 limit 1;
select cast(t as smallint) from timestamp_2 limit 1;
@@ -48,7 +50,7 @@ select cast(t as double) from timestamp_2 limit 1;
select cast(t as string) from timestamp_2 limit 1;
insert overwrite table timestamp_2
- select '2011-01-01 01:01:01.000100000' from src limit 1;
+ select '2011-01-01 01:01:01.000100000' from src tablesample (1 rows);
select cast(t as boolean) from timestamp_2 limit 1;
select cast(t as tinyint) from timestamp_2 limit 1;
select cast(t as smallint) from timestamp_2 limit 1;
@@ -59,7 +61,7 @@ select cast(t as double) from timestamp_2 limit 1;
select cast(t as string) from timestamp_2 limit 1;
insert overwrite table timestamp_2
- select '2011-01-01 01:01:01.001000011' from src limit 1;
+ select '2011-01-01 01:01:01.001000011' from src tablesample (1 rows);
select cast(t as boolean) from timestamp_2 limit 1;
select cast(t as tinyint) from timestamp_2 limit 1;
select cast(t as smallint) from timestamp_2 limit 1;
diff --git a/ql/src/test/queries/clientpositive/timestamp_3.q b/ql/src/test/queries/clientpositive/timestamp_3.q
index cda724f..e5a4345 100644
--- a/ql/src/test/queries/clientpositive/timestamp_3.q
+++ b/ql/src/test/queries/clientpositive/timestamp_3.q
@@ -1,10 +1,12 @@
+set hive.fetch.task.conversion=more;
+
drop table timestamp_3;
create table timestamp_3 (t timestamp);
alter table timestamp_3 set serde 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe';
-insert overwrite table timestamp_3
- select cast(cast('1.3041352164485E9' as double) as timestamp) from src limit 1;
+insert overwrite table timestamp_3
+ select cast(cast('1.3041352164485E9' as double) as timestamp) from src tablesample (1 rows);
select cast(t as boolean) from timestamp_3 limit 1;
select cast(t as tinyint) from timestamp_3 limit 1;
select cast(t as smallint) from timestamp_3 limit 1;
diff --git a/ql/src/test/queries/clientpositive/timestamp_comparison.q b/ql/src/test/queries/clientpositive/timestamp_comparison.q
index f64ae48..30fee3c 100644
--- a/ql/src/test/queries/clientpositive/timestamp_comparison.q
+++ b/ql/src/test/queries/clientpositive/timestamp_comparison.q
@@ -1,5 +1,6 @@
+set hive.fetch.task.conversion=more;
-select cast('2011-05-06 07:08:09' as timestamp) >
+select cast('2011-05-06 07:08:09' as timestamp) >
cast('2011-05-06 07:08:09' as timestamp) from src limit 1;
select cast('2011-05-06 07:08:09' as timestamp) <
diff --git a/ql/src/test/queries/clientpositive/timestamp_lazy.q b/ql/src/test/queries/clientpositive/timestamp_lazy.q
index 7a10052..e9a0cfa 100644
--- a/ql/src/test/queries/clientpositive/timestamp_lazy.q
+++ b/ql/src/test/queries/clientpositive/timestamp_lazy.q
@@ -1,6 +1,6 @@ drop table timestamp_lazy;
create table timestamp_lazy (t timestamp, key string, value string);
-insert overwrite table timestamp_lazy select cast('2011-01-01 01:01:01' as timestamp), key, value from src limit 5;
+insert overwrite table timestamp_lazy select cast('2011-01-01 01:01:01' as timestamp), key, value from src tablesample (5 rows);
select t,key,value from timestamp_lazy ORDER BY key ASC, value ASC;
select t,key,value from timestamp_lazy distribute by t sort by key ASC, value ASC;
diff --git a/ql/src/test/queries/clientpositive/timestamp_udf.q b/ql/src/test/queries/clientpositive/timestamp_udf.q
index 2620ace..ade9fb4 100644
--- a/ql/src/test/queries/clientpositive/timestamp_udf.q
+++ b/ql/src/test/queries/clientpositive/timestamp_udf.q
@@ -1,13 +1,15 @@
+set hive.fetch.task.conversion=more;
+
drop table timestamp_udf;
drop table timestamp_udf_string;
create table timestamp_udf (t timestamp);
create table timestamp_udf_string (t string);
-from src
+from (select * from src tablesample (1 rows)) s
insert overwrite table timestamp_udf
- select '2011-05-06 07:08:09.1234567' limit 1
+ select '2011-05-06 07:08:09.1234567'
insert overwrite table timestamp_udf_string
- select '2011-05-06 07:08:09.1234567' limit 1;
+ select '2011-05-06 07:08:09.1234567';
-- Test UDFs with Timestamp input
select unix_timestamp(t), year(t), month(t), day(t), dayofmonth(t),
diff --git a/ql/src/test/queries/clientpositive/transform1.q b/ql/src/test/queries/clientpositive/transform1.q
index 962077c..3bed2b6 100644
--- a/ql/src/test/queries/clientpositive/transform1.q
+++ b/ql/src/test/queries/clientpositive/transform1.q
@@ -12,7 +12,7 @@ SELECT transform(*) USING 'cat' AS (col array) FROM transform1_t1;
create table transform1_t2(col array<int>);
insert overwrite table transform1_t2
-select array(1,2,3) from src limit 1;
+select array(1,2,3) from src tablesample (1 rows);
EXPLAIN
SELECT transform('0\0021\0022') USING 'cat' AS (col array) FROM transform1_t2;
diff --git a/ql/src/test/queries/clientpositive/truncate_column.q b/ql/src/test/queries/clientpositive/truncate_column.q
index d756b47..0bfb23e 100644
--- a/ql/src/test/queries/clientpositive/truncate_column.q
+++ b/ql/src/test/queries/clientpositive/truncate_column.q
@@ -5,7 +5,7 @@ ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe' STORED A
set hive.stats.autogather=true;
-INSERT OVERWRITE TABLE test_tab SELECT * FROM src LIMIT 10;
+INSERT OVERWRITE TABLE test_tab SELECT * FROM src tablesample (10 rows);
DESC FORMATTED test_tab;
@@ -20,7 +20,7 @@ DESC FORMATTED test_tab;
SELECT * FROM test_tab ORDER BY value;
-- Truncate multiple columns
-INSERT OVERWRITE TABLE test_tab SELECT * FROM src LIMIT 10;
+INSERT OVERWRITE TABLE test_tab SELECT * FROM src tablesample (10 rows);
TRUNCATE TABLE test_tab COLUMNS (key, value);
@@ -40,7 +40,7 @@ SELECT * FROM test_tab ORDER BY value;
-- Test truncating with a binary serde
ALTER TABLE test_tab SET SERDE 'org.apache.hadoop.hive.serde2.columnar.LazyBinaryColumnarSerDe';
-INSERT OVERWRITE TABLE test_tab SELECT * FROM src LIMIT 10;
+INSERT OVERWRITE TABLE test_tab SELECT * FROM src tablesample (10 rows);
DESC FORMATTED test_tab;
@@ -65,7 +65,7 @@ SELECT * FROM test_tab ORDER BY value;
-- Test truncating a partition
CREATE TABLE test_tab_part (key STRING, value STRING) PARTITIONED BY (part STRING) STORED AS RCFILE;
-INSERT OVERWRITE TABLE test_tab_part PARTITION (part = '1') SELECT * FROM src LIMIT 10;
+INSERT OVERWRITE TABLE test_tab_part PARTITION (part = '1') SELECT * FROM src tablesample (10 rows);
DESC FORMATTED test_tab_part PARTITION (part = '1');
diff --git a/ql/src/test/queries/clientpositive/truncate_column_merge.q b/ql/src/test/queries/clientpositive/truncate_column_merge.q
index a7aab35..7a59efc 100644
--- a/ql/src/test/queries/clientpositive/truncate_column_merge.q
+++ b/ql/src/test/queries/clientpositive/truncate_column_merge.q
@@ -2,9 +2,9 @@ CREATE TABLE test_tab (key STRING, value STRING) STORED AS RCFILE;
-INSERT OVERWRITE TABLE test_tab SELECT * FROM src LIMIT 5;
+INSERT OVERWRITE TABLE test_tab SELECT * FROM src tablesample (5 rows);
-INSERT INTO TABLE test_tab SELECT * FROM src LIMIT 5;
+INSERT INTO TABLE test_tab SELECT * FROM src tablesample (5 rows);
-- The value should be 2 indicating the table has 2 files
SELECT COUNT(DISTINCT INPUT__FILE__NAME) FROM test_tab;
diff --git a/ql/src/test/queries/clientpositive/type_cast_1.q b/ql/src/test/queries/clientpositive/type_cast_1.q
index a160732..4d1d978 100644
--- a/ql/src/test/queries/clientpositive/type_cast_1.q
+++ b/ql/src/test/queries/clientpositive/type_cast_1.q
@@ -1,3 +1,5 @@
+set hive.fetch.task.conversion=more;
+
EXPLAIN
SELECT IF(false, 1, cast(2 as smallint)) + 3 FROM src LIMIT 1;
diff --git a/ql/src/test/queries/clientpositive/type_conversions_1.q b/ql/src/test/queries/clientpositive/type_conversions_1.q
index 63dd66e..4c4a828 100644
--- a/ql/src/test/queries/clientpositive/type_conversions_1.q
+++ b/ql/src/test/queries/clientpositive/type_conversions_1.q
@@ -1,3 +1,4 @@
+set hive.fetch.task.conversion=more;
-- casting from null should yield null
select
diff --git a/ql/src/test/queries/clientpositive/type_widening.q b/ql/src/test/queries/clientpositive/type_widening.q
index 0d36bc4..b18c014 100644
--- a/ql/src/test/queries/clientpositive/type_widening.q
+++ b/ql/src/test/queries/clientpositive/type_widening.q
@@ -1,3 +1,4 @@
+set hive.fetch.task.conversion=more;
-- Check for int, bigint automatic type widening conversions in UDFs, UNIONS
EXPLAIN SELECT COALESCE(0, 9223372036854775807) FROM src LIMIT 1;
SELECT COALESCE(0, 9223372036854775807) FROM src LIMIT 1;
diff --git a/ql/src/test/queries/clientpositive/udf_E.q b/ql/src/test/queries/clientpositive/udf_E.q
index 113af61..41bdec0 100644
--- a/ql/src/test/queries/clientpositive/udf_E.q
+++ b/ql/src/test/queries/clientpositive/udf_E.q
@@ -1,14 +1,16 @@
-explain
-select E() FROM src LIMIT 1;
+set hive.fetch.task.conversion=more;
+
+explain
+select E() FROM src tablesample (1 rows);
-select E() FROM src LIMIT 1;
+select E() FROM src tablesample (1 rows);
DESCRIBE FUNCTION E;
DESCRIBE FUNCTION EXTENDED E;
explain
-select E() FROM src LIMIT 1;
+select E() FROM src tablesample (1 rows);
-select E() FROM src LIMIT 1;
+select E() FROM src tablesample (1 rows);
DESCRIBE FUNCTION E;
DESCRIBE FUNCTION EXTENDED E;
diff --git a/ql/src/test/queries/clientpositive/udf_PI.q b/ql/src/test/queries/clientpositive/udf_PI.q
index 1fde7df..945483e 100644
--- a/ql/src/test/queries/clientpositive/udf_PI.q
+++ b/ql/src/test/queries/clientpositive/udf_PI.q
@@ -1,14 +1,16 @@
-explain
-select PI() FROM src LIMIT 1;
+set hive.fetch.task.conversion=more;
+
+explain
+select PI() FROM src tablesample (1 rows);
-select PI() FROM src LIMIT 1;
+select PI() FROM src tablesample (1 rows);
DESCRIBE FUNCTION PI;
DESCRIBE FUNCTION EXTENDED PI;
explain
-select PI() FROM src LIMIT 1;
+select PI() FROM src tablesample (1 rows);
-select PI() FROM src LIMIT 1;
+select PI() FROM src tablesample (1 rows);
DESCRIBE FUNCTION PI;
DESCRIBE FUNCTION EXTENDED PI;
\ No newline at end of file
diff --git a/ql/src/test/queries/clientpositive/udf_abs.q b/ql/src/test/queries/clientpositive/udf_abs.q
index f4f227d..0c06a5b 100644
--- a/ql/src/test/queries/clientpositive/udf_abs.q
+++ b/ql/src/test/queries/clientpositive/udf_abs.q
@@ -1,3 +1,5 @@
+set hive.fetch.task.conversion=more;
+
DESCRIBE FUNCTION abs;
DESCRIBE FUNCTION EXTENDED abs;
@@ -7,7 +9,7 @@ EXPLAIN SELECT
abs(123),
abs(-9223372036854775807),
abs(9223372036854775807)
-FROM src LIMIT 1;
+FROM src tablesample (1 rows);
SELECT
abs(0),
@@ -15,16 +17,16 @@ SELECT
abs(123),
abs(-9223372036854775807),
abs(9223372036854775807)
-FROM src LIMIT 1;
+FROM src tablesample (1 rows);
EXPLAIN SELECT
abs(0.0),
abs(-3.14159265),
abs(3.14159265)
-FROM src LIMIT 1;
+FROM src tablesample (1 rows);
SELECT
abs(0.0),
abs(-3.14159265),
abs(3.14159265)
-FROM src LIMIT 1;
+FROM src tablesample (1 rows);
diff --git a/ql/src/test/queries/clientpositive/udf_acos.q b/ql/src/test/queries/clientpositive/udf_acos.q
index 625a2aa..f9adc16 100644
--- a/ql/src/test/queries/clientpositive/udf_acos.q
+++ b/ql/src/test/queries/clientpositive/udf_acos.q
@@ -1,14 +1,16 @@
+set hive.fetch.task.conversion=more;
+
DESCRIBE FUNCTION acos;
DESCRIBE FUNCTION EXTENDED acos;
SELECT acos(null)
-FROM src LIMIT 1;
+FROM src tablesample (1 rows);
SELECT acos(0)
-FROM src LIMIT 1;
+FROM src tablesample (1 rows);
SELECT acos(-0.5), asin(0.66)
-FROM src LIMIT 1;
+FROM src tablesample (1 rows);
SELECT acos(2)
-FROM src LIMIT 1;
+FROM src tablesample (1 rows);
diff --git a/ql/src/test/queries/clientpositive/udf_array.q b/ql/src/test/queries/clientpositive/udf_array.q
index fca8fe8..5a6a183 100644
--- a/ql/src/test/queries/clientpositive/udf_array.q
+++ b/ql/src/test/queries/clientpositive/udf_array.q
@@ -1,8 +1,10 @@
+set hive.fetch.task.conversion=more;
+
DESCRIBE FUNCTION array;
DESCRIBE FUNCTION EXTENDED array;
EXPLAIN SELECT array(), array()[1], array(1, 2, 3), array(1, 2, 3)[2], array(1,"a", 2, 3), array(1,"a", 2, 3)[2],
-array(array(1), array(2), array(3), array(4))[1][0] FROM src LIMIT 1;
+array(array(1), array(2), array(3), array(4))[1][0] FROM src tablesample (1 rows);
SELECT array(), array()[1], array(1, 2, 3), array(1, 2, 3)[2], array(1,"a", 2, 3), array(1,"a", 2, 3)[2],
-array(array(1), array(2), array(3), array(4))[1][0] FROM src LIMIT 1;
+array(array(1), array(2), array(3), array(4))[1][0] FROM src tablesample (1 rows);
diff --git a/ql/src/test/queries/clientpositive/udf_array_contains.q b/ql/src/test/queries/clientpositive/udf_array_contains.q
index 937bb0b..d2dad64 100644
--- a/ql/src/test/queries/clientpositive/udf_array_contains.q
+++ b/ql/src/test/queries/clientpositive/udf_array_contains.q
@@ -1,9 +1,11 @@
+set hive.fetch.task.conversion=more;
+
DESCRIBE FUNCTION array_contains;
DESCRIBE FUNCTION EXTENDED array_contains;
-- evalutes function for array of primitives
-SELECT array_contains(array(1, 2, 3), 1) FROM src LIMIT 1;
+SELECT array_contains(array(1, 2, 3), 1) FROM src tablesample (1 rows);
-- evaluates function for nested arrays
SELECT array_contains(array(array(1,2), array(2,3), array(3,4)), array(1,2))
-FROM src LIMIT 1;
+FROM src tablesample (1 rows);
diff --git a/ql/src/test/queries/clientpositive/udf_ascii.q b/ql/src/test/queries/clientpositive/udf_ascii.q
index 53b389f..3d885a2 100644
--- a/ql/src/test/queries/clientpositive/udf_ascii.q
+++ b/ql/src/test/queries/clientpositive/udf_ascii.q
@@ -1,3 +1,5 @@
+set hive.fetch.task.conversion=more;
+
DESCRIBE FUNCTION ascii;
DESCRIBE FUNCTION EXTENDED ascii;
@@ -5,10 +7,10 @@ EXPLAIN SELECT
ascii('Facebook'),
ascii(''),
ascii('!')
-FROM src LIMIT 1;
+FROM src tablesample (1 rows);
SELECT
ascii('Facebook'),
ascii(''),
ascii('!')
-FROM src LIMIT 1;
+FROM src tablesample (1 rows);
diff --git a/ql/src/test/queries/clientpositive/udf_asin.q b/ql/src/test/queries/clientpositive/udf_asin.q
index f95a5f5..73b77d1 100644
--- a/ql/src/test/queries/clientpositive/udf_asin.q
+++ b/ql/src/test/queries/clientpositive/udf_asin.q
@@ -1,14 +1,16 @@
+set hive.fetch.task.conversion=more;
+
DESCRIBE FUNCTION asin;
DESCRIBE FUNCTION EXTENDED asin;
SELECT asin(null)
-FROM src LIMIT 1;
+FROM src tablesample (1 rows);
SELECT asin(0)
-FROM src LIMIT 1;
+FROM src tablesample (1 rows);
SELECT asin(-0.5), asin(0.66)
-FROM src LIMIT 1;
+FROM src tablesample (1 rows);
SELECT asin(2)
-FROM src LIMIT 1;
+FROM src tablesample (1 rows);
diff --git a/ql/src/test/queries/clientpositive/udf_atan.q b/ql/src/test/queries/clientpositive/udf_atan.q
index d4ef03d..090438c 100644
--- a/ql/src/test/queries/clientpositive/udf_atan.q
+++ b/ql/src/test/queries/clientpositive/udf_atan.q
@@ -1,16 +1,18 @@
+set hive.fetch.task.conversion=more;
+
DESCRIBE FUNCTION atan;
DESCRIBE FUNCTION EXTENDED atan;
SELECT atan(null)
-FROM src LIMIT 1;
+FROM src tablesample (1 rows);
SELECT atan(1), atan(6), atan(-1.0)
-FROM src LIMIT 1;
+FROM src tablesample (1 rows);
DESCRIBE FUNCTION atan;
DESCRIBE FUNCTION EXTENDED atan;
SELECT atan(null)
-FROM src LIMIT 1;
+FROM src tablesample (1 rows);
SELECT atan(1), atan(6), atan(-1.0)
-FROM src LIMIT 1;
+FROM src tablesample (1 rows);
diff --git a/ql/src/test/queries/clientpositive/udf_between.q b/ql/src/test/queries/clientpositive/udf_between.q
index eb3ccea..b22ee9c 100644
--- a/ql/src/test/queries/clientpositive/udf_between.q
+++ b/ql/src/test/queries/clientpositive/udf_between.q
@@ -1,3 +1,5 @@
+set hive.fetch.task.conversion=more;
+
describe function between;
describe function extended between;
diff --git a/ql/src/test/queries/clientpositive/udf_bin.q b/ql/src/test/queries/clientpositive/udf_bin.q
index 2b9ad62..c5a7ac1 100644
--- a/ql/src/test/queries/clientpositive/udf_bin.q
+++ b/ql/src/test/queries/clientpositive/udf_bin.q
@@ -1,3 +1,5 @@
+set hive.fetch.task.conversion=more;
+
DESCRIBE FUNCTION bin;
DESCRIBE FUNCTION EXTENDED bin;
@@ -5,7 +7,7 @@ SELECT
bin(1),
bin(0),
bin(99992421)
-FROM src LIMIT 1;
+FROM src tablesample (1 rows);
-- Negative numbers should be treated as two's complement (64 bit).
-SELECT bin(-5) FROM src LIMIT 1; +SELECT bin(-5) FROM src tablesample (1 rows); diff --git a/ql/src/test/queries/clientpositive/udf_bitmap_and.q b/ql/src/test/queries/clientpositive/udf_bitmap_and.q index 7ea50da..ed7711c 100644 --- a/ql/src/test/queries/clientpositive/udf_bitmap_and.q +++ b/ql/src/test/queries/clientpositive/udf_bitmap_and.q @@ -1,11 +1,13 @@ -select ewah_bitmap_and(array(13,2,4,8589934592,4096,0), array(13,2,4,8589934592,4096,0)) from src limit 1; -select ewah_bitmap_and(array(13,2,4,8589934592,4096,0), array(8,2,4,8589934592,128,0)) from src limit 1; +set hive.fetch.task.conversion=more; + +select ewah_bitmap_and(array(13,2,4,8589934592,4096,0), array(13,2,4,8589934592,4096,0)) from src tablesample (1 rows); +select ewah_bitmap_and(array(13,2,4,8589934592,4096,0), array(8,2,4,8589934592,128,0)) from src tablesample (1 rows); drop table bitmap_test; create table bitmap_test (a array<bigint>, b array<bigint>); insert overwrite table bitmap_test -select array(13,2,4,8589934592,4096,0), array(8,2,4,8589934592,128,0) from src limit 10; +select array(13,2,4,8589934592,4096,0), array(8,2,4,8589934592,128,0) from src tablesample (10 rows); select ewah_bitmap_and(a,b) from bitmap_test; diff --git a/ql/src/test/queries/clientpositive/udf_bitmap_empty.q b/ql/src/test/queries/clientpositive/udf_bitmap_empty.q index 88e9616..142b248 100644 --- a/ql/src/test/queries/clientpositive/udf_bitmap_empty.q +++ b/ql/src/test/queries/clientpositive/udf_bitmap_empty.q @@ -1,3 +1,5 @@ -select ewah_bitmap_empty(array(13,2,4,8589934592,0,0)) from src limit 1; +set hive.fetch.task.conversion=more; -select ewah_bitmap_empty(array(13,2,4,8589934592,4096,0)) from src limit 1; +select ewah_bitmap_empty(array(13,2,4,8589934592,0,0)) from src tablesample (1 rows); + +select ewah_bitmap_empty(array(13,2,4,8589934592,4096,0)) from src tablesample (1 rows); diff --git a/ql/src/test/queries/clientpositive/udf_bitmap_or.q b/ql/src/test/queries/clientpositive/udf_bitmap_or.q index 0b71e68..00785b7 100644 --- a/ql/src/test/queries/clientpositive/udf_bitmap_or.q +++ b/ql/src/test/queries/clientpositive/udf_bitmap_or.q @@ -1,11 +1,13 @@ -select ewah_bitmap_or(array(13,2,4,8589934592,4096,0), array(13,2,4,8589934592,4096,0)) from src limit 1; -select ewah_bitmap_or(array(13,2,4,8589934592,4096,0), array(8,2,4,8589934592,128,0)) from src limit 1; +set hive.fetch.task.conversion=more; + +select ewah_bitmap_or(array(13,2,4,8589934592,4096,0), array(13,2,4,8589934592,4096,0)) from src tablesample (1 rows); +select ewah_bitmap_or(array(13,2,4,8589934592,4096,0), array(8,2,4,8589934592,128,0)) from src tablesample (1 rows); drop table bitmap_test; create table bitmap_test (a array<bigint>, b array<bigint>); insert overwrite table bitmap_test -select array(13,2,4,8589934592,4096,0), array(8,2,4,8589934592,128,0) from src limit 10; +select array(13,2,4,8589934592,4096,0), array(8,2,4,8589934592,128,0) from src tablesample (10 rows); select ewah_bitmap_or(a,b) from bitmap_test; diff --git a/ql/src/test/queries/clientpositive/udf_case.q b/ql/src/test/queries/clientpositive/udf_case.q index 4f71e70..03f9f9f 100644 --- a/ql/src/test/queries/clientpositive/udf_case.q +++ b/ql/src/test/queries/clientpositive/udf_case.q @@ -1,3 +1,5 @@ +set hive.fetch.task.conversion=more; + DESCRIBE FUNCTION case; DESCRIBE FUNCTION EXTENDED case; @@ -27,7 +29,7 @@ SELECT CASE 1 WHEN 22 THEN 23 WHEN 21 THEN 24 END -FROM src LIMIT 1; +FROM src tablesample (1 rows); SELECT CASE 1 WHEN 1 THEN 2 @@ -54,10 +56,10 @@ SELECT CASE 1 WHEN 22 THEN 23 WHEN 21 THEN 24 END -FROM src
LIMIT 1; +FROM src tablesample (1 rows); -- verify that short-circuiting is working correctly for CASE -- we should never get to the ELSE branch, which would raise an exception SELECT CASE 1 WHEN 1 THEN 'yo' ELSE reflect('java.lang.String', 'bogus', 1) END -FROM src LIMIT 1; +FROM src tablesample (1 rows); diff --git a/ql/src/test/queries/clientpositive/udf_case_thrift.q b/ql/src/test/queries/clientpositive/udf_case_thrift.q index 736bb05..2aa76f1 100644 --- a/ql/src/test/queries/clientpositive/udf_case_thrift.q +++ b/ql/src/test/queries/clientpositive/udf_case_thrift.q @@ -1,3 +1,5 @@ +set hive.fetch.task.conversion=more; + EXPLAIN SELECT CASE src_thrift.lint[0] WHEN 0 THEN src_thrift.lint[0] + 1 @@ -14,7 +16,7 @@ SELECT CASE src_thrift.lint[0] WHEN '0' THEN src_thrift.lstring ELSE NULL END)[0] -FROM src_thrift LIMIT 3; +FROM src_thrift tablesample (3 rows); SELECT CASE src_thrift.lint[0] WHEN 0 THEN src_thrift.lint[0] + 1 @@ -31,4 +33,4 @@ SELECT CASE src_thrift.lint[0] WHEN '0' THEN src_thrift.lstring ELSE NULL END)[0] -FROM src_thrift LIMIT 3; +FROM src_thrift tablesample (3 rows); diff --git a/ql/src/test/queries/clientpositive/udf_coalesce.q b/ql/src/test/queries/clientpositive/udf_coalesce.q index 48ca29c..d3c417b 100644 --- a/ql/src/test/queries/clientpositive/udf_coalesce.q +++ b/ql/src/test/queries/clientpositive/udf_coalesce.q @@ -1,3 +1,5 @@ +set hive.fetch.task.conversion=more; + DESCRIBE FUNCTION coalesce; DESCRIBE FUNCTION EXTENDED coalesce; @@ -20,7 +22,7 @@ SELECT COALESCE(1), COALESCE(NULL, 2.0, 3.0), COALESCE(2.0, NULL, 3.0), COALESCE(IF(TRUE, NULL, 0), NULL) -FROM src LIMIT 1; +FROM src tablesample (1 rows); SELECT COALESCE(1), COALESCE(1, 2), @@ -40,7 +42,7 @@ SELECT COALESCE(1), COALESCE(NULL, 2.0, 3.0), COALESCE(2.0, NULL, 3.0), COALESCE(IF(TRUE, NULL, 0), NULL) -FROM src LIMIT 1; +FROM src tablesample (1 rows); EXPLAIN SELECT COALESCE(src_thrift.lint[1], 999), diff --git a/ql/src/test/queries/clientpositive/udf_compare_java_string.q b/ql/src/test/queries/clientpositive/udf_compare_java_string.q index 6c12f81..c7983b8 100644 --- a/ql/src/test/queries/clientpositive/udf_compare_java_string.q +++ b/ql/src/test/queries/clientpositive/udf_compare_java_string.q @@ -1,3 +1,5 @@ +set hive.fetch.task.conversion=more; + EXPLAIN CREATE TEMPORARY FUNCTION test_udf_get_java_string AS 'org.apache.hadoop.hive.ql.udf.generic.GenericUDFTestGetJavaString'; diff --git a/ql/src/test/queries/clientpositive/udf_concat.q b/ql/src/test/queries/clientpositive/udf_concat.q index 3d3c85d..e35a1cf 100644 --- a/ql/src/test/queries/clientpositive/udf_concat.q +++ b/ql/src/test/queries/clientpositive/udf_concat.q @@ -1,3 +1,5 @@ +set hive.fetch.task.conversion=more; + DESCRIBE FUNCTION concat; DESCRIBE FUNCTION EXTENDED concat; @@ -12,10 +14,10 @@ SELECT concat(1, 2), concat(1), concat('1234', 'abc', 'extra argument') -FROM src LIMIT 1; +FROM src tablesample (1 rows); -- binary/mixed SELECT concat(cast('ab' as binary), cast('cd' as binary)), concat('ab', cast('cd' as binary)) -FROM src LIMIT 1; +FROM src tablesample (1 rows); diff --git a/ql/src/test/queries/clientpositive/udf_concat_ws.q b/ql/src/test/queries/clientpositive/udf_concat_ws.q index 6a0ce20..538dfae 100644 --- a/ql/src/test/queries/clientpositive/udf_concat_ws.q +++ b/ql/src/test/queries/clientpositive/udf_concat_ws.q @@ -1,3 +1,5 @@ +set hive.fetch.task.conversion=more; + DESCRIBE FUNCTION concat_ws; DESCRIBE FUNCTION EXTENDED concat_ws; @@ -24,7 +26,7 @@ SELECT concat_ws('.', array('www', 'face', 'book', 'com'), '1234'), 
concat_ws('_', array('www', 'face'), array('book', 'com', '1234')), concat_ws('**', 'www', array('face'), array('book', 'com', '1234')), concat_ws('[]', array('www'), 'face', array('book', 'com', '1234')), - concat_ws('AAA', array('www'), array('face', 'book', 'com'), '1234') FROM dest1 LIMIT 1; + concat_ws('AAA', array('www'), array('face', 'book', 'com'), '1234') FROM dest1 tablesample (1 rows); SELECT concat_ws('.', array('www', 'face', 'book', 'com'), '1234'), concat_ws('-', 'www', array('face', 'book', 'com'), '1234'), @@ -32,7 +34,7 @@ SELECT concat_ws('.', array('www', 'face', 'book', 'com'), '1234'), concat_ws('_', array('www', 'face'), array('book', 'com', '1234')), concat_ws('**', 'www', array('face'), array('book', 'com', '1234')), concat_ws('[]', array('www'), 'face', array('book', 'com', '1234')), - concat_ws('AAA', array('www'), array('face', 'book', 'com'), '1234') FROM dest1 LIMIT 1; + concat_ws('AAA', array('www'), array('face', 'book', 'com'), '1234') FROM dest1 tablesample (1 rows); SELECT concat_ws(NULL, array('www', 'face', 'book', 'com'), '1234'), concat_ws(NULL, 'www', array('face', 'book', 'com'), '1234'), @@ -40,4 +42,4 @@ SELECT concat_ws(NULL, array('www', 'face', 'book', 'com'), '1234'), concat_ws(NULL, array('www', 'face'), array('book', 'com', '1234')), concat_ws(NULL, 'www', array('face'), array('book', 'com', '1234')), concat_ws(NULL, array('www'), 'face', array('book', 'com', '1234')), - concat_ws(NULL, array('www'), array('face', 'book', 'com'), '1234') FROM dest1 LIMIT 1; + concat_ws(NULL, array('www'), array('face', 'book', 'com'), '1234') FROM dest1 tablesample (1 rows); diff --git a/ql/src/test/queries/clientpositive/udf_conv.q b/ql/src/test/queries/clientpositive/udf_conv.q index 212bcfb..c6d6cf8 100644 --- a/ql/src/test/queries/clientpositive/udf_conv.q +++ b/ql/src/test/queries/clientpositive/udf_conv.q @@ -1,3 +1,5 @@ +set hive.fetch.task.conversion=more; + DESCRIBE FUNCTION conv; DESCRIBE FUNCTION EXTENDED conv; @@ -9,7 +11,7 @@ SELECT conv('22', 10, 10), conv('110011', 2, 16), conv('facebook', 36, 16) -FROM src LIMIT 1; +FROM src tablesample (1 rows); -- Test negative numbers. If to_base is positive, the number should be handled -- as a two's complement (64-bit) @@ -18,7 +20,7 @@ SELECT conv('1011', 2, -16), conv('-1', 10, 16), conv('-15', 10, 16) -FROM src LIMIT 1; +FROM src tablesample (1 rows); -- Test overflow. If a number is two large, the result should be -1 (if signed) -- or MAX_LONG (if unsigned) @@ -27,7 +29,7 @@ SELECT conv('9223372036854775807', 36, -16), conv('-9223372036854775807', 36, 16), conv('-9223372036854775807', 36, -16) -FROM src LIMIT 1; +FROM src tablesample (1 rows); -- Test with invalid input. If one of the bases is invalid, the result should -- be NULL. If there is an invalid digit in the number, the longest valid @@ -37,7 +39,7 @@ SELECT conv('131', 1, 5), conv('515', 5, 100), conv('10', -2, 2) -FROM src LIMIT 1; +FROM src tablesample (1 rows); -- Perform the same tests with number arguments. 
@@ -45,31 +47,31 @@ SELECT conv(4521, 10, 36), conv(22, 10, 10), conv(110011, 2, 16) -FROM src LIMIT 1; +FROM src tablesample (1 rows); SELECT conv(-641, 10, -10), conv(1011, 2, -16), conv(-1, 10, 16), conv(-15, 10, 16) -FROM src LIMIT 1; +FROM src tablesample (1 rows); SELECT conv(9223372036854775807, 36, 16), conv(9223372036854775807, 36, -16), conv(-9223372036854775807, 36, 16), conv(-9223372036854775807, 36, -16) -FROM src LIMIT 1; +FROM src tablesample (1 rows); SELECT conv(123455, 3, 10), conv(131, 1, 5), conv(515, 5, 100), conv('10', -2, 2) -FROM src LIMIT 1; +FROM src tablesample (1 rows); -- Make sure that state is properly reset. SELECT conv(key, 10, 16), conv(key, 16, 10) -FROM src LIMIT 3; +FROM src tablesample (3 rows); diff --git a/ql/src/test/queries/clientpositive/udf_cos.q b/ql/src/test/queries/clientpositive/udf_cos.q index 7887c4c..11ef8d7 100644 --- a/ql/src/test/queries/clientpositive/udf_cos.q +++ b/ql/src/test/queries/clientpositive/udf_cos.q @@ -1,8 +1,10 @@ +set hive.fetch.task.conversion=more; + DESCRIBE FUNCTION cos; DESCRIBE FUNCTION EXTENDED cos; SELECT cos(null) -FROM src LIMIT 1; +FROM src tablesample (1 rows); SELECT cos(0.98), cos(1.57), cos(-0.5) -FROM src LIMIT 1; +FROM src tablesample (1 rows); diff --git a/ql/src/test/queries/clientpositive/udf_degrees.q b/ql/src/test/queries/clientpositive/udf_degrees.q index 014ca1c..d5360fe 100644 --- a/ql/src/test/queries/clientpositive/udf_degrees.q +++ b/ql/src/test/queries/clientpositive/udf_degrees.q @@ -1,14 +1,16 @@ -explain -select degrees(PI()) FROM src LIMIT 1; +set hive.fetch.task.conversion=more; + +explain +select degrees(PI()) FROM src tablesample (1 rows); -select degrees(PI()) FROM src LIMIT 1; +select degrees(PI()) FROM src tablesample (1 rows); DESCRIBE FUNCTION degrees; DESCRIBE FUNCTION EXTENDED degrees; explain -select degrees(PI()) FROM src LIMIT 1; +select degrees(PI()) FROM src tablesample (1 rows); -select degrees(PI()) FROM src LIMIT 1; +select degrees(PI()) FROM src tablesample (1 rows); DESCRIBE FUNCTION degrees; DESCRIBE FUNCTION EXTENDED degrees; \ No newline at end of file diff --git a/ql/src/test/queries/clientpositive/udf_div.q b/ql/src/test/queries/clientpositive/udf_div.q index 4229e62..b0d2844 100644 --- a/ql/src/test/queries/clientpositive/udf_div.q +++ b/ql/src/test/queries/clientpositive/udf_div.q @@ -1,4 +1,6 @@ +set hive.fetch.task.conversion=more; + DESCRIBE FUNCTION div; DESCRIBE FUNCTION EXTENDED div; -SELECT 3 DIV 2 FROM SRC LIMIT 1; +SELECT 3 DIV 2 FROM SRC tablesample (1 rows); diff --git a/ql/src/test/queries/clientpositive/udf_divide.q b/ql/src/test/queries/clientpositive/udf_divide.q index dc4b2e7..d36ba99 100644 --- a/ql/src/test/queries/clientpositive/udf_divide.q +++ b/ql/src/test/queries/clientpositive/udf_divide.q @@ -1,4 +1,6 @@ +set hive.fetch.task.conversion=more; + DESCRIBE FUNCTION /; DESCRIBE FUNCTION EXTENDED /; -SELECT 3 / 2 FROM SRC LIMIT 1; +SELECT 3 / 2 FROM SRC tablesample (1 rows); diff --git a/ql/src/test/queries/clientpositive/udf_elt.q b/ql/src/test/queries/clientpositive/udf_elt.q index c32340a..fae7649 100644 --- a/ql/src/test/queries/clientpositive/udf_elt.q +++ b/ql/src/test/queries/clientpositive/udf_elt.q @@ -1,3 +1,5 @@ +set hive.fetch.task.conversion=more; + DESCRIBE FUNCTION elt; DESCRIBE FUNCTION EXTENDED elt; @@ -13,7 +15,7 @@ SELECT elt(2, 'abc', 'defg'), elt(null, 'abc', 'defg'), elt(0, 'abc', 'defg'), elt(3, 'abc', 'defg') -FROM src LIMIT 1; +FROM src tablesample (1 rows); SELECT elt(2, 'abc', 'defg'), elt(3, 'aa', 'bb', 'cc', 'dd', 
'ee', 'ff', 'gg'), @@ -26,4 +28,4 @@ SELECT elt(2, 'abc', 'defg'), elt(null, 'abc', 'defg'), elt(0, 'abc', 'defg'), elt(3, 'abc', 'defg') -FROM src LIMIT 1; +FROM src tablesample (1 rows); diff --git a/ql/src/test/queries/clientpositive/udf_equal.q b/ql/src/test/queries/clientpositive/udf_equal.q index f5e9a7d..ea9b18b 100644 --- a/ql/src/test/queries/clientpositive/udf_equal.q +++ b/ql/src/test/queries/clientpositive/udf_equal.q @@ -1,12 +1,14 @@ +set hive.fetch.task.conversion=more; + DESCRIBE FUNCTION =; DESCRIBE FUNCTION EXTENDED =; DESCRIBE FUNCTION ==; DESCRIBE FUNCTION EXTENDED ==; -SELECT true=false, false=true, false=false, true=true, NULL=NULL, true=NULL, NULL=true, false=NULL, NULL=false FROM src LIMIT 1; +SELECT true=false, false=true, false=false, true=true, NULL=NULL, true=NULL, NULL=true, false=NULL, NULL=false FROM src tablesample (1 rows); DESCRIBE FUNCTION <=>; DESCRIBE FUNCTION EXTENDED <=>; -SELECT true<=>false, false<=>true, false<=>false, true<=>true, NULL<=>NULL, true<=>NULL, NULL<=>true, false<=>NULL, NULL<=>false FROM src LIMIT 1; +SELECT true<=>false, false<=>true, false<=>false, true<=>true, NULL<=>NULL, true<=>NULL, NULL<=>true, false<=>NULL, NULL<=>false FROM src tablesample (1 rows); diff --git a/ql/src/test/queries/clientpositive/udf_explode.q b/ql/src/test/queries/clientpositive/udf_explode.q index 19af288..ae65164 100644 --- a/ql/src/test/queries/clientpositive/udf_explode.q +++ b/ql/src/test/queries/clientpositive/udf_explode.q @@ -1,22 +1,24 @@ +set hive.fetch.task.conversion=more; + DESCRIBE FUNCTION explode; DESCRIBE FUNCTION EXTENDED explode; -EXPLAIN EXTENDED SELECT explode(array(1,2,3)) AS myCol FROM src LIMIT 3; -EXPLAIN EXTENDED SELECT a.myCol, count(1) FROM (SELECT explode(array(1,2,3)) AS myCol FROM src LIMIT 3) a GROUP BY a.myCol; +EXPLAIN EXTENDED SELECT explode(array(1,2,3)) AS myCol FROM src tablesample (1 rows); +EXPLAIN EXTENDED SELECT a.myCol, count(1) FROM (SELECT explode(array(1,2,3)) AS myCol FROM src tablesample (1 rows)) a GROUP BY a.myCol; -SELECT explode(array(1,2,3)) AS myCol FROM src LIMIT 3; -SELECT explode(array(1,2,3)) AS (myCol) FROM src LIMIT 3; -SELECT a.myCol, count(1) FROM (SELECT explode(array(1,2,3)) AS myCol FROM src LIMIT 3) a GROUP BY a.myCol; +SELECT explode(array(1,2,3)) AS myCol FROM src tablesample (1 rows); +SELECT explode(array(1,2,3)) AS (myCol) FROM src tablesample (1 rows); +SELECT a.myCol, count(1) FROM (SELECT explode(array(1,2,3)) AS myCol FROM src tablesample (1 rows)) a GROUP BY a.myCol; -EXPLAIN EXTENDED SELECT explode(map(1,'one',2,'two',3,'three')) AS (key,val) FROM src LIMIT 3; -EXPLAIN EXTENDED SELECT a.key, a.val, count(1) FROM (SELECT explode(map(1,'one',2,'two',3,'three')) AS (key,val) FROM src LIMIT 3) a GROUP BY a.key, a.val; +EXPLAIN EXTENDED SELECT explode(map(1,'one',2,'two',3,'three')) AS (key,val) FROM src tablesample (1 rows); +EXPLAIN EXTENDED SELECT a.key, a.val, count(1) FROM (SELECT explode(map(1,'one',2,'two',3,'three')) AS (key,val) FROM src tablesample (1 rows)) a GROUP BY a.key, a.val; -SELECT explode(map(1,'one',2,'two',3,'three')) AS (key,val) FROM src LIMIT 3; -SELECT a.key, a.val, count(1) FROM (SELECT explode(map(1,'one',2,'two',3,'three')) AS (key,val) FROM src LIMIT 3) a GROUP BY a.key, a.val; +SELECT explode(map(1,'one',2,'two',3,'three')) AS (key,val) FROM src tablesample (1 rows); +SELECT a.key, a.val, count(1) FROM (SELECT explode(map(1,'one',2,'two',3,'three')) AS (key,val) FROM src tablesample (1 rows)) a GROUP BY a.key, a.val; drop table lazy_array_map; create table 
lazy_array_map (map_col map<int,string>, array_col array<string>); -INSERT OVERWRITE TABLE lazy_array_map select map(1,'one',2,'two',3,'three'), array('100','200','300') FROM src LIMIT 1; +INSERT OVERWRITE TABLE lazy_array_map select map(1,'one',2,'two',3,'three'), array('100','200','300') FROM src tablesample (1 rows); SELECT array_col, myCol from lazy_array_map lateral view explode(array_col) X AS myCol; SELECT map_col, myKey, myValue from lazy_array_map lateral view explode(map_col) X AS myKey, myValue; \ No newline at end of file diff --git a/ql/src/test/queries/clientpositive/udf_field.q b/ql/src/test/queries/clientpositive/udf_field.q index e995f5c..f5b3f9d 100644 --- a/ql/src/test/queries/clientpositive/udf_field.q +++ b/ql/src/test/queries/clientpositive/udf_field.q @@ -1,3 +1,5 @@ +set hive.fetch.task.conversion=more; + DESCRIBE FUNCTION field; DESCRIBE FUNCTION EXTENDED field; @@ -5,7 +7,7 @@ SELECT field("x", "a", "b", "c", "d"), field(NULL, "a", "b", "c", "d"), field(0, 1, 2, 3, 4) -FROM src LIMIT 1; +FROM src tablesample (1 rows); SELECT field("a", "a", "b", "c", "d"), @@ -13,7 +15,7 @@ SELECT field("c", "a", "b", "c", "d"), field("d", "a", "b", "c", "d"), field("d", "a", "b", NULL, "d") -FROM src LIMIT 1; +FROM src tablesample (1 rows); SELECT field(1, 1, 2, 3, 4), @@ -21,7 +23,7 @@ SELECT field(3, 1, 2, 3, 4), field(4, 1, 2, 3, 4), field(4, 1, 2, NULL, 4) -FROM src LIMIT 1; +FROM src tablesample (1 rows); CREATE TABLE test_table(col1 STRING, col2 STRING) STORED AS TEXTFILE; diff --git a/ql/src/test/queries/clientpositive/udf_find_in_set.q b/ql/src/test/queries/clientpositive/udf_find_in_set.q index eac2c6e..72c65b4 100644 --- a/ql/src/test/queries/clientpositive/udf_find_in_set.q +++ b/ql/src/test/queries/clientpositive/udf_find_in_set.q @@ -1,3 +1,5 @@ +set hive.fetch.task.conversion=more; + DESCRIBE FUNCTION find_in_set; DESCRIBE FUNCTION EXTENDED find_in_set; @@ -6,18 +8,18 @@ FROM src1 SELECT find_in_set(src1.key,concat(src1.key,',',src1.value)); FROM src1 SELECT find_in_set(src1.key,concat(src1.key,',',src1.value)); -SELECT find_in_set('ab','ab,abc,abcde') FROM src1 LIMIT 1; -SELECT find_in_set('ab','abc,ab,bbb') FROM src1 LIMIT 1; -SELECT find_in_set('ab','def,abc,ab') FROM src1 LIMIT 1; -SELECT find_in_set('ab','abc,abd,abf') FROM src1 LIMIT 1; -SELECT find_in_set(null,'a,b,c') FROM src1 LIMIT 1; -SELECT find_in_set('a',null) FROM src1 LIMIT 1; -SELECT find_in_set('', '') FROM src1 LIMIT 1; -SELECT find_in_set('',',') FROM src1 LIMIT 1; -SELECT find_in_set('','a,,b') FROM src1 LIMIT 1; -SELECT find_in_set('','a,b,') FROM src1 LIMIT 1; -SELECT find_in_set(',','a,b,d,') FROM src1 LIMIT 1; -SELECT find_in_set('a','') FROM src1 LIMIT 1; -SELECT find_in_set('a,','a,b,c,d') FROM src1 LIMIT 1; +SELECT find_in_set('ab','ab,abc,abcde') FROM src1 tablesample (1 rows); +SELECT find_in_set('ab','abc,ab,bbb') FROM src1 tablesample (1 rows); +SELECT find_in_set('ab','def,abc,ab') FROM src1 tablesample (1 rows); +SELECT find_in_set('ab','abc,abd,abf') FROM src1 tablesample (1 rows); +SELECT find_in_set(null,'a,b,c') FROM src1 tablesample (1 rows); +SELECT find_in_set('a',null) FROM src1 tablesample (1 rows); +SELECT find_in_set('', '') FROM src1 tablesample (1 rows); +SELECT find_in_set('',',') FROM src1 tablesample (1 rows); +SELECT find_in_set('','a,,b') FROM src1 tablesample (1 rows); +SELECT find_in_set('','a,b,') FROM src1 tablesample (1 rows); +SELECT find_in_set(',','a,b,d,') FROM src1 tablesample (1 rows); +SELECT find_in_set('a','') FROM src1 tablesample (1 rows); +SELECT
find_in_set('a,','a,b,c,d') FROM src1 tablesample (1 rows); SELECT * FROM src1 WHERE NOT find_in_set(key,'311,128,345,2,956')=0; diff --git a/ql/src/test/queries/clientpositive/udf_format_number.q b/ql/src/test/queries/clientpositive/udf_format_number.q index e2084cd..2504bd0 100644 --- a/ql/src/test/queries/clientpositive/udf_format_number.q +++ b/ql/src/test/queries/clientpositive/udf_format_number.q @@ -1,3 +1,5 @@ +set hive.fetch.task.conversion=more; + use default; -- Test format_number() UDF @@ -7,26 +9,26 @@ DESCRIBE FUNCTION EXTENDED format_number; EXPLAIN SELECT format_number(12332.123456, 4), format_number(12332.1,4), - format_number(12332.2,0) FROM src limit 1; + format_number(12332.2,0) FROM src tablesample (1 rows); SELECT format_number(12332.123456, 4), format_number(12332.1,4), format_number(12332.2,0) -FROM src limit 1; +FROM src tablesample (1 rows); -- positive numbers SELECT format_number(0.123456789, 12), format_number(12345678.123456789, 5), format_number(1234567.123456789, 7), format_number(123456.123456789, 0) -FROM src limit 1; +FROM src tablesample (1 rows); -- negative numbers SELECT format_number(-123456.123456789, 0), format_number(-1234567.123456789, 2), format_number(-0.123456789, 15), format_number(-12345.123456789, 4) -FROM src limit 1; +FROM src tablesample (1 rows); -- zeros SELECT format_number(0.0, 4), @@ -34,7 +36,7 @@ SELECT format_number(0.0, 4), format_number(000.0000, 1), format_number(00000.0000, 1), format_number(-00.0, 4) -FROM src limit 1; +FROM src tablesample (1 rows); -- integers SELECT format_number(0, 0), @@ -42,7 +44,7 @@ SELECT format_number(0, 0), format_number(12, 2), format_number(123, 5), format_number(1234, 7) -FROM src limit 1; +FROM src tablesample (1 rows); -- long and double boundary -- 9223372036854775807 is LONG_MAX @@ -54,4 +56,4 @@ SELECT format_number(-9223372036854775807, 10), format_number(9223372036854775807, 20), format_number(4.9E-324, 324), format_number(1.7976931348623157E308, 308) -FROM src limit 1; +FROM src tablesample (1 rows); diff --git a/ql/src/test/queries/clientpositive/udf_get_json_object.q b/ql/src/test/queries/clientpositive/udf_get_json_object.q index 464f2df..05f7f5a 100644 --- a/ql/src/test/queries/clientpositive/udf_get_json_object.q +++ b/ql/src/test/queries/clientpositive/udf_get_json_object.q @@ -1,3 +1,5 @@ +set hive.fetch.task.conversion=more; + DESCRIBE FUNCTION get_json_object; DESCRIBE FUNCTION EXTENDED get_json_object; @@ -5,6 +7,8 @@ CREATE TABLE dest1(c1 STRING) STORED AS TEXTFILE; FROM src INSERT OVERWRITE TABLE dest1 SELECT ' abc ' WHERE src.key = 86; +set hive.fetch.task.conversion=more; + EXPLAIN SELECT get_json_object(src_json.json, '$.owner') FROM src_json; @@ -33,8 +37,8 @@ SELECT get_json_object(src_json.json, '$.fb:testid') FROM src_json; CREATE TABLE dest2(c1 STRING) STORED AS RCFILE; -INSERT OVERWRITE TABLE dest2 SELECT '{"a":"b\nc"}' FROM src LIMIT 1; +INSERT OVERWRITE TABLE dest2 SELECT '{"a":"b\nc"}' FROM src tablesample (1 rows); SELECT * FROM dest2; -SELECT get_json_object(c1, '$.a') FROM dest2; \ No newline at end of file +SELECT get_json_object(c1, '$.a') FROM dest2; diff --git a/ql/src/test/queries/clientpositive/udf_greaterthan.q b/ql/src/test/queries/clientpositive/udf_greaterthan.q index aea110a..230bd24 100644 --- a/ql/src/test/queries/clientpositive/udf_greaterthan.q +++ b/ql/src/test/queries/clientpositive/udf_greaterthan.q @@ -1,4 +1,6 @@ +set hive.fetch.task.conversion=more; + DESCRIBE FUNCTION >; DESCRIBE FUNCTION EXTENDED >; -SELECT true>false, false>true, 
false>false, true>true FROM src LIMIT 1; \ No newline at end of file +SELECT true>false, false>true, false>false, true>true FROM src tablesample (1 rows); \ No newline at end of file diff --git a/ql/src/test/queries/clientpositive/udf_greaterthanorequal.q b/ql/src/test/queries/clientpositive/udf_greaterthanorequal.q index 8de165b..025eed7 100644 --- a/ql/src/test/queries/clientpositive/udf_greaterthanorequal.q +++ b/ql/src/test/queries/clientpositive/udf_greaterthanorequal.q @@ -1,4 +1,6 @@ +set hive.fetch.task.conversion=more; + DESCRIBE FUNCTION >=; DESCRIBE FUNCTION EXTENDED >=; -SELECT true>=false, false>=true, false>=false, true>=true FROM src LIMIT 1; \ No newline at end of file +SELECT true>=false, false>=true, false>=false, true>=true FROM src tablesample (1 rows); \ No newline at end of file diff --git a/ql/src/test/queries/clientpositive/udf_hash.q b/ql/src/test/queries/clientpositive/udf_hash.q index faf3722..5814a17 100644 --- a/ql/src/test/queries/clientpositive/udf_hash.q +++ b/ql/src/test/queries/clientpositive/udf_hash.q @@ -1,3 +1,5 @@ +set hive.fetch.task.conversion=more; + DESCRIBE FUNCTION hash; DESCRIBE FUNCTION EXTENDED hash; @@ -7,11 +9,11 @@ SELECT hash(CAST(1 AS TINYINT)), hash(CAST(2 AS SMALLINT)), hash(CAST(1.25 AS FLOAT)), hash(CAST(16.0 AS DOUBLE)), hash('400'), hash('abc'), hash(TRUE), hash(FALSE), hash(1, 2, 3) -FROM src LIMIT 1; +FROM src tablesample (1 rows); SELECT hash(CAST(1 AS TINYINT)), hash(CAST(2 AS SMALLINT)), hash(3), hash(CAST('123456789012' AS BIGINT)), hash(CAST(1.25 AS FLOAT)), hash(CAST(16.0 AS DOUBLE)), hash('400'), hash('abc'), hash(TRUE), hash(FALSE), hash(1, 2, 3) -FROM src LIMIT 1; +FROM src tablesample (1 rows); diff --git a/ql/src/test/queries/clientpositive/udf_hex.q b/ql/src/test/queries/clientpositive/udf_hex.q index 37e035a..0e54579 100644 --- a/ql/src/test/queries/clientpositive/udf_hex.q +++ b/ql/src/test/queries/clientpositive/udf_hex.q @@ -1,3 +1,5 @@ +set hive.fetch.task.conversion=more; + DESCRIBE FUNCTION hex; DESCRIBE FUNCTION EXTENDED hex; @@ -7,14 +9,14 @@ SELECT hex('Facebook'), hex('\0'), hex('qwertyuiopasdfghjkl') -FROM src LIMIT 1; +FROM src tablesample (1 rows); -- If the argument is a number, hex should convert it to hexadecimal. SELECT hex(1), hex(0), hex(4207849477) -FROM src LIMIT 1; +FROM src tablesample (1 rows); -- Negative numbers should be treated as two's complement (64 bit). 
-SELECT hex(-5) FROM src LIMIT 1; +SELECT hex(-5) FROM src tablesample (1 rows); diff --git a/ql/src/test/queries/clientpositive/udf_hour.q b/ql/src/test/queries/clientpositive/udf_hour.q index c5c366d..b9811e6 100644 --- a/ql/src/test/queries/clientpositive/udf_hour.q +++ b/ql/src/test/queries/clientpositive/udf_hour.q @@ -1,3 +1,5 @@ +set hive.fetch.task.conversion=more; + DESCRIBE FUNCTION hour; DESCRIBE FUNCTION EXTENDED hour; diff --git a/ql/src/test/queries/clientpositive/udf_if.q b/ql/src/test/queries/clientpositive/udf_if.q index 4f7c8b4..d9285ff 100644 --- a/ql/src/test/queries/clientpositive/udf_if.q +++ b/ql/src/test/queries/clientpositive/udf_if.q @@ -1,3 +1,5 @@ +set hive.fetch.task.conversion=more; + DESCRIBE FUNCTION if; DESCRIBE FUNCTION EXTENDED if; @@ -8,7 +10,7 @@ SELECT IF(TRUE, 1, 2) AS COL1, IF(2=2, 1, NULL) AS COL4, IF(2=2, NULL, 1) AS COL5, IF(IF(TRUE, NULL, FALSE), 1, 2) AS COL6 -FROM src LIMIT 1; +FROM src tablesample (1 rows); SELECT IF(TRUE, 1, 2) AS COL1, @@ -17,7 +19,7 @@ SELECT IF(TRUE, 1, 2) AS COL1, IF(2=2, 1, NULL) AS COL4, IF(2=2, NULL, 1) AS COL5, IF(IF(TRUE, NULL, FALSE), 1, 2) AS COL6 -FROM src LIMIT 1; +FROM src tablesample (1 rows); -- Type conversions EXPLAIN @@ -25,10 +27,10 @@ SELECT IF(TRUE, CAST(128 AS SMALLINT), CAST(1 AS TINYINT)) AS COL1, IF(FALSE, 1, 1.1) AS COL2, IF(FALSE, 1, 'ABC') AS COL3, IF(FALSE, 'ABC', 12.3) AS COL4 -FROM src LIMIT 1; +FROM src tablesample (1 rows); SELECT IF(TRUE, CAST(128 AS SMALLINT), CAST(1 AS TINYINT)) AS COL1, IF(FALSE, 1, 1.1) AS COL2, IF(FALSE, 1, 'ABC') AS COL3, IF(FALSE, 'ABC', 12.3) AS COL4 -FROM src LIMIT 1; +FROM src tablesample (1 rows); diff --git a/ql/src/test/queries/clientpositive/udf_in.q b/ql/src/test/queries/clientpositive/udf_in.q index 7577813..a7ce3c6 100644 --- a/ql/src/test/queries/clientpositive/udf_in.q +++ b/ql/src/test/queries/clientpositive/udf_in.q @@ -1,3 +1,5 @@ +set hive.fetch.task.conversion=more; + SELECT 1 IN (1, 2, 3), 4 IN (1, 2, 3), array(1,2,3) IN (array(1,2,3)), @@ -8,6 +10,6 @@ SELECT 1 IN (1, 2, 3), 1 IN (1, 2, 3) OR false IN(false), NULL IN (1, 2, 3), 4 IN (1, 2, 3, NULL), - (1+3) IN (5, 6, (1+2) + 1) FROM src LIMIT 1; + (1+3) IN (5, 6, (1+2) + 1) FROM src tablesample (1 rows); SELECT key FROM src WHERE key IN ("238", 86); \ No newline at end of file diff --git a/ql/src/test/queries/clientpositive/udf_inline.q b/ql/src/test/queries/clientpositive/udf_inline.q index 39006f4..95d55f7 100644 --- a/ql/src/test/queries/clientpositive/udf_inline.q +++ b/ql/src/test/queries/clientpositive/udf_inline.q @@ -1,3 +1,5 @@ +set hive.fetch.task.conversion=more; + describe function inline; explain SELECT inline( diff --git a/ql/src/test/queries/clientpositive/udf_instr.q b/ql/src/test/queries/clientpositive/udf_instr.q index 20ed8e4..790a104 100644 --- a/ql/src/test/queries/clientpositive/udf_instr.q +++ b/ql/src/test/queries/clientpositive/udf_instr.q @@ -1,3 +1,5 @@ +set hive.fetch.task.conversion=more; + DESCRIBE FUNCTION instr; DESCRIBE FUNCTION EXTENDED instr; @@ -15,7 +17,7 @@ SELECT instr('abcd', 'abc'), instr(CAST(16.0 AS DOUBLE), '.0'), instr(null, 'abc'), instr('abcd', null) -FROM src LIMIT 1; +FROM src tablesample (1 rows); SELECT instr('abcd', 'abc'), instr('abcabc', 'ccc'), @@ -30,4 +32,4 @@ SELECT instr('abcd', 'abc'), instr(CAST(16.0 AS DOUBLE), '.0'), instr(null, 'abc'), instr('abcd', null) -FROM src LIMIT 1; +FROM src tablesample (1 rows); diff --git a/ql/src/test/queries/clientpositive/udf_isnull_isnotnull.q b/ql/src/test/queries/clientpositive/udf_isnull_isnotnull.q 
index d1569cc..efb834e 100644 --- a/ql/src/test/queries/clientpositive/udf_isnull_isnotnull.q +++ b/ql/src/test/queries/clientpositive/udf_isnull_isnotnull.q @@ -1,3 +1,5 @@ +set hive.fetch.task.conversion=more; + DESCRIBE FUNCTION isnull; DESCRIBE FUNCTION EXTENDED isnull; diff --git a/ql/src/test/queries/clientpositive/udf_java_method.q b/ql/src/test/queries/clientpositive/udf_java_method.q index 2f28be1..51280b2 100644 --- a/ql/src/test/queries/clientpositive/udf_java_method.q +++ b/ql/src/test/queries/clientpositive/udf_java_method.q @@ -1,3 +1,5 @@ +set hive.fetch.task.conversion=more; + DESCRIBE FUNCTION java_method; DESCRIBE FUNCTION EXTENDED java_method; @@ -11,7 +13,7 @@ SELECT java_method("java.lang.String", "valueOf", 1), java_method("java.lang.Math", "round", 2.5), java_method("java.lang.Math", "exp", 1.0), java_method("java.lang.Math", "floor", 1.9) -FROM src LIMIT 1; +FROM src tablesample (1 rows); SELECT java_method("java.lang.String", "valueOf", 1), @@ -21,5 +23,5 @@ SELECT java_method("java.lang.String", "valueOf", 1), java_method("java.lang.Math", "round", 2.5), java_method("java.lang.Math", "exp", 1.0), java_method("java.lang.Math", "floor", 1.9) -FROM src LIMIT 1; +FROM src tablesample (1 rows); diff --git a/ql/src/test/queries/clientpositive/udf_length.q b/ql/src/test/queries/clientpositive/udf_length.q index b843079..4ebaf0b 100644 --- a/ql/src/test/queries/clientpositive/udf_length.q +++ b/ql/src/test/queries/clientpositive/udf_length.q @@ -1,3 +1,5 @@ +set hive.fetch.task.conversion=more; + DESCRIBE FUNCTION length; DESCRIBE FUNCTION EXTENDED length; diff --git a/ql/src/test/queries/clientpositive/udf_lessthan.q b/ql/src/test/queries/clientpositive/udf_lessthan.q index a257700..0332677 100644 --- a/ql/src/test/queries/clientpositive/udf_lessthan.q +++ b/ql/src/test/queries/clientpositive/udf_lessthan.q @@ -1,4 +1,6 @@ +set hive.fetch.task.conversion=more; + DESCRIBE FUNCTION <; DESCRIBE FUNCTION EXTENDED <; -SELECT true; DESCRIBE FUNCTION EXTENDED <>; diff --git a/ql/src/test/queries/clientpositive/udf_notop.q b/ql/src/test/queries/clientpositive/udf_notop.q index 8839654..dceab7e 100644 --- a/ql/src/test/queries/clientpositive/udf_notop.q +++ b/ql/src/test/queries/clientpositive/udf_notop.q @@ -1,3 +1,5 @@ +set hive.fetch.task.conversion=more; + SELECT 1 NOT IN (1, 2, 3), 4 NOT IN (1, 2, 3), 1 = 2 NOT IN (true, false), @@ -7,4 +9,4 @@ SELECT 1 NOT IN (1, 2, 3), "abc" NOT RLIKE "^bc", "abc" NOT REGEXP "^ab", "abc" NOT REGEXP "^bc", - 1 IN (1, 2) AND "abc" NOT LIKE "bc%" FROM src LIMIT 1; + 1 IN (1, 2) AND "abc" NOT LIKE "bc%" FROM src tablesample (1 rows); diff --git a/ql/src/test/queries/clientpositive/udf_nvl.q b/ql/src/test/queries/clientpositive/udf_nvl.q index 0133b4b..9716257 100644 --- a/ql/src/test/queries/clientpositive/udf_nvl.q +++ b/ql/src/test/queries/clientpositive/udf_nvl.q @@ -1,3 +1,4 @@ +set hive.fetch.task.conversion=more; DESCRIBE FUNCTION nvl; DESCRIBE FUNCTION EXTENDED nvl; @@ -5,9 +6,9 @@ DESCRIBE FUNCTION EXTENDED nvl; EXPLAIN SELECT NVL( 1 , 2 ) AS COL1, NVL( NULL, 5 ) AS COL2 -FROM src LIMIT 1; +FROM src tablesample (1 rows); SELECT NVL( 1 , 2 ) AS COL1, NVL( NULL, 5 ) AS COL2 -FROM src LIMIT 1; +FROM src tablesample (1 rows); diff --git a/ql/src/test/queries/clientpositive/udf_pmod.q b/ql/src/test/queries/clientpositive/udf_pmod.q index c5ebb6d..d42a2f3 100644 --- a/ql/src/test/queries/clientpositive/udf_pmod.q +++ b/ql/src/test/queries/clientpositive/udf_pmod.q @@ -1,20 +1,22 @@ +set hive.fetch.task.conversion=more; + DESCRIBE FUNCTION 
pmod; DESCRIBE FUNCTION EXTENDED pmod; SELECT pmod(null, null) -FROM src LIMIT 1; +FROM src tablesample (1 rows); SELECT pmod(-100,9), pmod(-50,101), pmod(-1000,29) -FROM src LIMIT 1; +FROM src tablesample (1 rows); SELECT pmod(100,19), pmod(50,125), pmod(300,15) -FROM src LIMIT 1; +FROM src tablesample (1 rows); -SELECT pmod(CAST(-100 AS TINYINT),CAST(9 AS TINYINT)), pmod(CAST(-50 AS TINYINT),CAST(101 AS TINYINT)), pmod(CAST(-100 AS TINYINT),CAST(29 AS TINYINT)) FROM src LIMIT 1; -SELECT pmod(CAST(-100 AS SMALLINT),CAST(9 AS SMALLINT)), pmod(CAST(-50 AS SMALLINT),CAST(101 AS SMALLINT)), pmod(CAST(-100 AS SMALLINT),CAST(29 AS SMALLINT)) FROM src LIMIT 1; -SELECT pmod(CAST(-100 AS BIGINT),CAST(9 AS BIGINT)), pmod(CAST(-50 AS BIGINT),CAST(101 AS BIGINT)), pmod(CAST(-100 AS BIGINT),CAST(29 AS BIGINT)) FROM src LIMIT 1; +SELECT pmod(CAST(-100 AS TINYINT),CAST(9 AS TINYINT)), pmod(CAST(-50 AS TINYINT),CAST(101 AS TINYINT)), pmod(CAST(-100 AS TINYINT),CAST(29 AS TINYINT)) FROM src tablesample (1 rows); +SELECT pmod(CAST(-100 AS SMALLINT),CAST(9 AS SMALLINT)), pmod(CAST(-50 AS SMALLINT),CAST(101 AS SMALLINT)), pmod(CAST(-100 AS SMALLINT),CAST(29 AS SMALLINT)) FROM src tablesample (1 rows); +SELECT pmod(CAST(-100 AS BIGINT),CAST(9 AS BIGINT)), pmod(CAST(-50 AS BIGINT),CAST(101 AS BIGINT)), pmod(CAST(-100 AS BIGINT),CAST(29 AS BIGINT)) FROM src tablesample (1 rows); -SELECT pmod(CAST(-100.91 AS FLOAT),CAST(9.8 AS FLOAT)), pmod(CAST(-50.1 AS FLOAT),CAST(101.8 AS FLOAT)), pmod(CAST(-100.91 AS FLOAT),CAST(29.75 AS FLOAT)) FROM src LIMIT 1; -SELECT pmod(CAST(-100.91 AS DOUBLE),CAST(9.8 AS DOUBLE)), pmod(CAST(-50.1 AS DOUBLE),CAST(101.8 AS DOUBLE)), pmod(CAST(-100.91 AS DOUBLE),CAST(29.75 AS DOUBLE)) FROM src LIMIT 1; -SELECT pmod(CAST(-100.91 AS DECIMAL(5,2)),CAST(9.8 AS DECIMAL(2,1))), pmod(CAST(-50.1 AS DECIMAL(3,1)),CAST(101.8 AS DECIMAL(4,1))), pmod(CAST(-100.91 AS DECIMAL(5,2)),CAST(29.75 AS DECIMAL(4,2))) FROM src LIMIT 1; +SELECT pmod(CAST(-100.91 AS FLOAT),CAST(9.8 AS FLOAT)), pmod(CAST(-50.1 AS FLOAT),CAST(101.8 AS FLOAT)), pmod(CAST(-100.91 AS FLOAT),CAST(29.75 AS FLOAT)) FROM src tablesample (1 rows); +SELECT pmod(CAST(-100.91 AS DOUBLE),CAST(9.8 AS DOUBLE)), pmod(CAST(-50.1 AS DOUBLE),CAST(101.8 AS DOUBLE)), pmod(CAST(-100.91 AS DOUBLE),CAST(29.75 AS DOUBLE)) FROM src tablesample (1 rows); +SELECT pmod(CAST(-100.91 AS DECIMAL(5,2)),CAST(9.8 AS DECIMAL(2,1))), pmod(CAST(-50.1 AS DECIMAL(3,1)),CAST(101.8 AS DECIMAL(4,1))), pmod(CAST(-100.91 AS DECIMAL(5,2)),CAST(29.75 AS DECIMAL(4,2))) FROM src tablesample (1 rows); diff --git a/ql/src/test/queries/clientpositive/udf_printf.q b/ql/src/test/queries/clientpositive/udf_printf.q index 99e89cc..ebc0f99 100644 --- a/ql/src/test/queries/clientpositive/udf_printf.q +++ b/ql/src/test/queries/clientpositive/udf_printf.q @@ -4,21 +4,23 @@ use default; DESCRIBE FUNCTION printf; DESCRIBE FUNCTION EXTENDED printf; +set hive.fetch.task.conversion=more; + EXPLAIN -SELECT printf("Hello World %d %s", 100, "days") FROM src LIMIT 1; +SELECT printf("Hello World %d %s", 100, "days") FROM src tablesample (1 rows); -- Test Primitive Types -SELECT printf("Hello World %d %s", 100, "days") FROM src LIMIT 1; -SELECT printf("All Type Test: %b, %c, %d, %e, %+10.4f, %g, %h, %s, %a", false, 65, 15000, 12.3400, 27183.240051, 2300.41, 50, "corret", 256.125) FROM src LIMIT 1; +SELECT printf("Hello World %d %s", 100, "days") FROM src tablesample (1 rows); +SELECT printf("All Type Test: %b, %c, %d, %e, %+10.4f, %g, %h, %s, %a", false, 65, 15000, 12.3400, 27183.240051, 2300.41, 
50, "corret", 256.125) FROM src tablesample (1 rows); -- Test NULL Values -SELECT printf("Color %s, String Null: %s, number1 %d, number2 %05d, Integer Null: %d, hex %#x, float %5.2f Double Null: %f\n", "red", NULL, 123456, 89, NULL, 255, 3.14159, NULL) FROM src LIMIT 1; +SELECT printf("Color %s, String Null: %s, number1 %d, number2 %05d, Integer Null: %d, hex %#x, float %5.2f Double Null: %f\n", "red", NULL, 123456, 89, NULL, 255, 3.14159, NULL) FROM src tablesample (1 rows); -- Test Timestamp create table timestamp_udf (t timestamp); -from src +from (select * from src tablesample (1 rows)) s insert overwrite table timestamp_udf - select '2011-05-06 07:08:09.1234567' limit 1; + select '2011-05-06 07:08:09.1234567'; select printf("timestamp: %s", t) from timestamp_udf; drop table timestamp_udf; diff --git a/ql/src/test/queries/clientpositive/udf_radians.q b/ql/src/test/queries/clientpositive/udf_radians.q index 001d1cf..19242bd 100644 --- a/ql/src/test/queries/clientpositive/udf_radians.q +++ b/ql/src/test/queries/clientpositive/udf_radians.q @@ -1,16 +1,18 @@ -explain -select radians(57.2958) FROM src LIMIT 1; +set hive.fetch.task.conversion=more; + +explain +select radians(57.2958) FROM src tablesample (1 rows); -select radians(57.2958) FROM src LIMIT 1; -select radians(143.2394) FROM src LIMIT 1; +select radians(57.2958) FROM src tablesample (1 rows); +select radians(143.2394) FROM src tablesample (1 rows); DESCRIBE FUNCTION radians; DESCRIBE FUNCTION EXTENDED radians; explain -select radians(57.2958) FROM src LIMIT 1; +select radians(57.2958) FROM src tablesample (1 rows); -select radians(57.2958) FROM src LIMIT 1; -select radians(143.2394) FROM src LIMIT 1; +select radians(57.2958) FROM src tablesample (1 rows); +select radians(143.2394) FROM src tablesample (1 rows); DESCRIBE FUNCTION radians; DESCRIBE FUNCTION EXTENDED radians; \ No newline at end of file diff --git a/ql/src/test/queries/clientpositive/udf_reflect.q b/ql/src/test/queries/clientpositive/udf_reflect.q index f357ff5..cef1e4a 100644 --- a/ql/src/test/queries/clientpositive/udf_reflect.q +++ b/ql/src/test/queries/clientpositive/udf_reflect.q @@ -1,3 +1,5 @@ +set hive.fetch.task.conversion=more; + DESCRIBE FUNCTION reflect; DESCRIBE FUNCTION EXTENDED reflect; @@ -10,7 +12,7 @@ SELECT reflect("java.lang.String", "valueOf", 1), reflect("java.lang.Math", "exp", 1.0), reflect("java.lang.Math", "floor", 1.9), reflect("java.lang.Integer", "valueOf", key, 16) -FROM src LIMIT 1; +FROM src tablesample (1 rows); SELECT reflect("java.lang.String", "valueOf", 1), @@ -21,4 +23,4 @@ SELECT reflect("java.lang.String", "valueOf", 1), reflect("java.lang.Math", "exp", 1.0), reflect("java.lang.Math", "floor", 1.9), reflect("java.lang.Integer", "valueOf", key, 16) -FROM src LIMIT 1; +FROM src tablesample (1 rows); diff --git a/ql/src/test/queries/clientpositive/udf_reflect2.q b/ql/src/test/queries/clientpositive/udf_reflect2.q index 9ffd755..a65294b 100644 --- a/ql/src/test/queries/clientpositive/udf_reflect2.q +++ b/ql/src/test/queries/clientpositive/udf_reflect2.q @@ -1,3 +1,5 @@ +set hive.fetch.task.conversion=more; + DESCRIBE FUNCTION reflect2; DESCRIBE FUNCTION EXTENDED reflect2; diff --git a/ql/src/test/queries/clientpositive/udf_regexp.q b/ql/src/test/queries/clientpositive/udf_regexp.q index 3aee109..12b685b 100644 --- a/ql/src/test/queries/clientpositive/udf_regexp.q +++ b/ql/src/test/queries/clientpositive/udf_regexp.q @@ -1,6 +1,8 @@ +set hive.fetch.task.conversion=more; + DESCRIBE FUNCTION regexp; DESCRIBE FUNCTION EXTENDED 
regexp; SELECT 'fofo' REGEXP '^fo', 'fo\no' REGEXP '^fo\no$', 'Bn' REGEXP '^Ba*n', 'afofo' REGEXP 'fo', 'afofo' REGEXP '^fo', 'Baan' REGEXP '^Ba?n', 'axe' REGEXP 'pi|apa', 'pip' REGEXP '^(pi)*$' -FROM src LIMIT 1; +FROM src tablesample (1 rows); diff --git a/ql/src/test/queries/clientpositive/udf_repeat.q b/ql/src/test/queries/clientpositive/udf_repeat.q index 162085f..91474ba 100644 --- a/ql/src/test/queries/clientpositive/udf_repeat.q +++ b/ql/src/test/queries/clientpositive/udf_repeat.q @@ -1,3 +1,5 @@ +set hive.fetch.task.conversion=more; + DESCRIBE FUNCTION repeat; DESCRIBE FUNCTION EXTENDED repeat; @@ -6,11 +8,11 @@ EXPLAIN SELECT repeat("", 4), repeat("asd", 0), repeat("asdf", -1) -FROM src LIMIT 1; +FROM src tablesample (1 rows); SELECT repeat("Facebook", 3), repeat("", 4), repeat("asd", 0), repeat("asdf", -1) -FROM src LIMIT 1; +FROM src tablesample (1 rows); diff --git a/ql/src/test/queries/clientpositive/udf_round.q b/ql/src/test/queries/clientpositive/udf_round.q index 18ebba8..1a87b78 100644 --- a/ql/src/test/queries/clientpositive/udf_round.q +++ b/ql/src/test/queries/clientpositive/udf_round.q @@ -1,15 +1,17 @@ +set hive.fetch.task.conversion=more; + DESCRIBE FUNCTION round; DESCRIBE FUNCTION EXTENDED round; SELECT round(null), round(null, 0), round(125, null), round(1.0/0.0, 0), round(power(-1.0,0.5), 0) -FROM src LIMIT 1; +FROM src tablesample (1 rows); SELECT round(55555), round(55555, 0), round(55555, 1), round(55555, 2), round(55555, 3), round(55555, -1), round(55555, -2), round(55555, -3), round(55555, -4), round(55555, -5), round(55555, -6), round(55555, -7), round(55555, -8) -FROM src LIMIT 1; +FROM src tablesample (1 rows); SELECT round(125.315), round(125.315, 0), @@ -18,7 +20,7 @@ SELECT round(-125.315), round(-125.315, 0), round(-125.315, 1), round(-125.315, 2), round(-125.315, 3), round(-125.315, 4), round(-125.315, -1), round(-125.315, -2), round(-125.315, -3), round(-125.315, -4) -FROM src LIMIT 1; +FROM src tablesample (1 rows); SELECT round(3.141592653589793, -15), round(3.141592653589793, -16), @@ -38,7 +40,7 @@ SELECT round(3.141592653589793, 12), round(3.141592653589793, 13), round(3.141592653589793, 13), round(3.141592653589793, 14), round(3.141592653589793, 15), round(3.141592653589793, 16) -FROM src LIMIT 1; +FROM src tablesample (1 rows); SELECT round(1809242.3151111344, 9), round(-1809242.3151111344, 9) -FROM src LIMIT 1; +FROM src tablesample (1 rows); diff --git a/ql/src/test/queries/clientpositive/udf_round_2.q b/ql/src/test/queries/clientpositive/udf_round_2.q index 6be3085..43988c1 100644 --- a/ql/src/test/queries/clientpositive/udf_round_2.q +++ b/ql/src/test/queries/clientpositive/udf_round_2.q @@ -1,8 +1,10 @@ --- test for NaN (not-a-number) +set hive.fetch.task.conversion=more; + +-- test for NaN (not-a-number) create table tstTbl1(n double); insert overwrite table tstTbl1 -select 'NaN' from src limit 1; +select 'NaN' from src tablesample (1 rows); select * from tstTbl1; @@ -10,4 +12,4 @@ select round(n, 1) from tstTbl1; select round(n) from tstTbl1; -- test for Infinity -select round(1/0), round(1/0, 2), round(1.0/0.0), round(1.0/0.0, 2) from src limit 1; +select round(1/0), round(1/0, 2), round(1.0/0.0), round(1.0/0.0, 2) from src tablesample (1 rows); diff --git a/ql/src/test/queries/clientpositive/udf_round_3.q b/ql/src/test/queries/clientpositive/udf_round_3.q index 50a1f44..f042b6f 100644 --- a/ql/src/test/queries/clientpositive/udf_round_3.q +++ b/ql/src/test/queries/clientpositive/udf_round_3.q @@ -1,14 +1,16 @@ +set 
hive.fetch.task.conversion=more; + -- test for TINYINT -select round(-128), round(127), round(0) from src limit 1; +select round(-128), round(127), round(0) from src tablesample (1 rows); -- test for SMALLINT -select round(-32768), round(32767), round(-129), round(128) from src limit 1; +select round(-32768), round(32767), round(-129), round(128) from src tablesample (1 rows); -- test for INT -select round(cast(negative(pow(2, 31)) as INT)), round(cast((pow(2, 31) - 1) as INT)), round(-32769), round(32768) from src limit 1; +select round(cast(negative(pow(2, 31)) as INT)), round(cast((pow(2, 31) - 1) as INT)), round(-32769), round(32768) from src tablesample (1 rows); -- test for BIGINT -select round(cast(negative(pow(2, 63)) as BIGINT)), round(cast((pow(2, 63) - 1) as BIGINT)), round(cast(negative(pow(2, 31) + 1) as BIGINT)), round(cast(pow(2, 31) as BIGINT)) from src limit 1; +select round(cast(negative(pow(2, 63)) as BIGINT)), round(cast((pow(2, 63) - 1) as BIGINT)), round(cast(negative(pow(2, 31) + 1) as BIGINT)), round(cast(pow(2, 31) as BIGINT)) from src tablesample (1 rows); -- test for DOUBLE -select round(126.1), round(126.7), round(32766.1), round(32766.7) from src limit 1; +select round(126.1), round(126.7), round(32766.1), round(32766.7) from src tablesample (1 rows); diff --git a/ql/src/test/queries/clientpositive/udf_rpad.q b/ql/src/test/queries/clientpositive/udf_rpad.q index 01e5fbd..4ee69e8 100644 --- a/ql/src/test/queries/clientpositive/udf_rpad.q +++ b/ql/src/test/queries/clientpositive/udf_rpad.q @@ -1,3 +1,5 @@ +set hive.fetch.task.conversion=more; + DESCRIBE FUNCTION rpad; DESCRIBE FUNCTION EXTENDED rpad; @@ -5,10 +7,10 @@ EXPLAIN SELECT rpad('hi', 1, '?'), rpad('hi', 5, '.'), rpad('hi', 6, '123') -FROM src LIMIT 1; +FROM src tablesample (1 rows); SELECT rpad('hi', 1, '?'), rpad('hi', 5, '.'), rpad('hi', 6, '123') -FROM src LIMIT 1; +FROM src tablesample (1 rows); diff --git a/ql/src/test/queries/clientpositive/udf_second.q b/ql/src/test/queries/clientpositive/udf_second.q index 1943188..f63426d 100644 --- a/ql/src/test/queries/clientpositive/udf_second.q +++ b/ql/src/test/queries/clientpositive/udf_second.q @@ -1,3 +1,5 @@ +set hive.fetch.task.conversion=more; + DESCRIBE FUNCTION second; DESCRIBE FUNCTION EXTENDED second; diff --git a/ql/src/test/queries/clientpositive/udf_sign.q b/ql/src/test/queries/clientpositive/udf_sign.q index abceb34..b1602e8 100644 --- a/ql/src/test/queries/clientpositive/udf_sign.q +++ b/ql/src/test/queries/clientpositive/udf_sign.q @@ -1,20 +1,22 @@ -explain -select sign(0) FROM src LIMIT 1; -select sign(0) FROM src LIMIT 1; +set hive.fetch.task.conversion=more; + +explain +select sign(0) FROM src tablesample (1 rows); +select sign(0) FROM src tablesample (1 rows); -select sign(-45) FROM src LIMIT 1; +select sign(-45) FROM src tablesample (1 rows); -select sign(46) FROM src LIMIT 1; +select sign(46) FROM src tablesample (1 rows); DESCRIBE FUNCTION sign; DESCRIBE FUNCTION EXTENDED sign; explain -select sign(0) FROM src LIMIT 1; -select sign(0) FROM src LIMIT 1; +select sign(0) FROM src tablesample (1 rows); +select sign(0) FROM src tablesample (1 rows); -select sign(-45) FROM src LIMIT 1; +select sign(-45) FROM src tablesample (1 rows); -select sign(46) FROM src LIMIT 1; +select sign(46) FROM src tablesample (1 rows); DESCRIBE FUNCTION sign; DESCRIBE FUNCTION EXTENDED sign; diff --git a/ql/src/test/queries/clientpositive/udf_sin.q b/ql/src/test/queries/clientpositive/udf_sin.q index abb7cac..79745be 100644 --- 
a/ql/src/test/queries/clientpositive/udf_sin.q +++ b/ql/src/test/queries/clientpositive/udf_sin.q @@ -1,8 +1,10 @@ +set hive.fetch.task.conversion=more; + DESCRIBE FUNCTION sin; DESCRIBE FUNCTION EXTENDED sin; SELECT sin(null) -FROM src LIMIT 1; +FROM src tablesample (1 rows); SELECT sin(0.98), sin(1.57), sin(-0.5) -FROM src LIMIT 1; +FROM src tablesample (1 rows); diff --git a/ql/src/test/queries/clientpositive/udf_size.q b/ql/src/test/queries/clientpositive/udf_size.q index 8aaa68a..f6f76a3 100644 --- a/ql/src/test/queries/clientpositive/udf_size.q +++ b/ql/src/test/queries/clientpositive/udf_size.q @@ -1,3 +1,5 @@ +set hive.fetch.task.conversion=more; + DESCRIBE FUNCTION size; DESCRIBE FUNCTION EXTENDED size; diff --git a/ql/src/test/queries/clientpositive/udf_sort_array.q b/ql/src/test/queries/clientpositive/udf_sort_array.q index ef09732..cdcd87b 100644 --- a/ql/src/test/queries/clientpositive/udf_sort_array.q +++ b/ql/src/test/queries/clientpositive/udf_sort_array.q @@ -1,3 +1,5 @@ +set hive.fetch.task.conversion=more; + use default; -- Test sort_array() UDF @@ -6,16 +8,16 @@ DESCRIBE FUNCTION EXTENDED sort_array; -- Evaluate function against STRING valued keys EXPLAIN -SELECT sort_array(array("b", "d", "c", "a")) FROM src LIMIT 1; +SELECT sort_array(array("b", "d", "c", "a")) FROM src tablesample (1 rows); -SELECT sort_array(array("f", "a", "g", "c", "b", "d", "e")) FROM src LIMIT 1; -SELECT sort_array(sort_array(array("hadoop distributed file system", "enterprise databases", "hadoop map-reduce"))) FROM src LIMIT 1; +SELECT sort_array(array("f", "a", "g", "c", "b", "d", "e")) FROM src tablesample (1 rows); +SELECT sort_array(sort_array(array("hadoop distributed file system", "enterprise databases", "hadoop map-reduce"))) FROM src tablesample (1 rows); -- Evaluate function against INT valued keys -SELECT sort_array(array(2, 9, 7, 3, 5, 4, 1, 6, 8)) FROM src LIMIT 1; +SELECT sort_array(array(2, 9, 7, 3, 5, 4, 1, 6, 8)) FROM src tablesample (1 rows); -- Evaluate function against FLOAT valued keys -SELECT sort_array(sort_array(array(2.333, 9, 1.325, 2.003, 0.777, -3.445, 1))) FROM src LIMIT 1; +SELECT sort_array(sort_array(array(2.333, 9, 1.325, 2.003, 0.777, -3.445, 1))) FROM src tablesample (1 rows); -- Test it against data in a table. 
CREATE TABLE dest1 ( diff --git a/ql/src/test/queries/clientpositive/udf_space.q b/ql/src/test/queries/clientpositive/udf_space.q index cf6466f..cc616f7 100644 --- a/ql/src/test/queries/clientpositive/udf_space.q +++ b/ql/src/test/queries/clientpositive/udf_space.q @@ -1,3 +1,5 @@ +set hive.fetch.task.conversion=more; + DESCRIBE FUNCTION space; DESCRIBE FUNCTION EXTENDED space; @@ -7,7 +9,7 @@ EXPLAIN SELECT space(1), space(-1), space(-100) -FROM src LIMIT 1; +FROM src tablesample (1 rows); SELECT length(space(10)), @@ -15,7 +17,7 @@ SELECT length(space(1)), length(space(-1)), length(space(-100)) -FROM src LIMIT 1; +FROM src tablesample (1 rows); SELECT space(10), @@ -23,5 +25,5 @@ SELECT space(1), space(-1), space(-100) -FROM src LIMIT 1; +FROM src tablesample (1 rows); diff --git a/ql/src/test/queries/clientpositive/udf_split.q b/ql/src/test/queries/clientpositive/udf_split.q index f799017..55919ea 100644 --- a/ql/src/test/queries/clientpositive/udf_split.q +++ b/ql/src/test/queries/clientpositive/udf_split.q @@ -1,3 +1,5 @@ +set hive.fetch.task.conversion=more; + DESCRIBE FUNCTION split; DESCRIBE FUNCTION EXTENDED split; @@ -6,11 +8,11 @@ EXPLAIN SELECT split('oneAtwoBthreeC', '[ABC]'), split('', '.'), split(50401020, 0) -FROM src LIMIT 1; +FROM src tablesample (1 rows); SELECT split('a b c', ' '), split('oneAtwoBthreeC', '[ABC]'), split('', '.'), split(50401020, 0) -FROM src LIMIT 1; +FROM src tablesample (1 rows); diff --git a/ql/src/test/queries/clientpositive/udf_struct.q b/ql/src/test/queries/clientpositive/udf_struct.q index 3ee19c8..ee2135b 100644 --- a/ql/src/test/queries/clientpositive/udf_struct.q +++ b/ql/src/test/queries/clientpositive/udf_struct.q @@ -1,9 +1,11 @@ +set hive.fetch.task.conversion=more; + DESCRIBE FUNCTION struct; DESCRIBE FUNCTION EXTENDED struct; EXPLAIN SELECT struct(1), struct(1, "a"), struct(1, "b", 1.5).col1, struct(1, struct("a", 1.5)).col2.col1 -FROM src LIMIT 1; +FROM src tablesample (1 rows); SELECT struct(1), struct(1, "a"), struct(1, "b", 1.5).col1, struct(1, struct("a", 1.5)).col2.col1 -FROM src LIMIT 1; +FROM src tablesample (1 rows); diff --git a/ql/src/test/queries/clientpositive/udf_substr.q b/ql/src/test/queries/clientpositive/udf_substr.q index 32757be..2d04f90 100644 --- a/ql/src/test/queries/clientpositive/udf_substr.q +++ b/ql/src/test/queries/clientpositive/udf_substr.q @@ -1,3 +1,5 @@ +set hive.fetch.task.conversion=more; + DESCRIBE FUNCTION substr; DESCRIBE FUNCTION EXTENDED substr; @@ -5,7 +7,7 @@ SELECT substr(null, 1), substr(null, 1, 1), substr('ABC', null), substr('ABC', null, 1), substr('ABC', 1, null) -FROM src LIMIT 1; +FROM src tablesample (1 rows); SELECT substr('ABC', 1, 0), substr('ABC', 1, -1), substr('ABC', 2, -100), @@ -14,7 +16,7 @@ SELECT substr('ABC', 100), substr('ABC', 100, 100), substr('ABC', -100), substr('ABC', -100, 100), substr('ABC', 2147483647), substr('ABC', 2147483647, 2147483647) -FROM src LIMIT 1; +FROM src tablesample (1 rows); SELECT substr('ABCDEFG', 3, 4), substr('ABCDEFG', -5, 4), @@ -22,7 +24,7 @@ SELECT substr('ABC', 0), substr('ABC', 1), substr('ABC', 2), substr('ABC', 3), substr('ABC', 1, 2147483647), substr('ABC', 2, 2147483647), substr('A', 0), substr('A', 1), substr('A', -1) -FROM src LIMIT 1; +FROM src tablesample (1 rows); SELECT substr('ABC', 0, 1), substr('ABC', 0, 2), substr('ABC', 0, 3), substr('ABC', 0, 4), @@ -30,14 +32,14 @@ SELECT substr('ABC', 2, 1), substr('ABC', 2, 2), substr('ABC', 2, 3), substr('ABC', 2, 4), substr('ABC', 3, 1), substr('ABC', 3, 2), substr('ABC', 3, 3), 
substr('ABC', 3, 4), substr('ABC', 4, 1) -FROM src LIMIT 1; +FROM src tablesample (1 rows); SELECT substr('ABC', -1, 1), substr('ABC', -1, 2), substr('ABC', -1, 3), substr('ABC', -1, 4), substr('ABC', -2, 1), substr('ABC', -2, 2), substr('ABC', -2, 3), substr('ABC', -2, 4), substr('ABC', -3, 1), substr('ABC', -3, 2), substr('ABC', -3, 3), substr('ABC', -3, 4), substr('ABC', -4, 1) -FROM src LIMIT 1; +FROM src tablesample (1 rows); -- substring() is a synonim of substr(), so just perform some basic tests SELECT @@ -46,7 +48,7 @@ SELECT substring('ABC', 0), substring('ABC', 1), substring('ABC', 2), substring('ABC', 3), substring('ABC', 1, 2147483647), substring('ABC', 2, 2147483647), substring('A', 0), substring('A', 1), substring('A', -1) -FROM src LIMIT 1; +FROM src tablesample (1 rows); -- test for binary substr SELECT @@ -63,7 +65,7 @@ SELECT substr(ABC, -3, 1), substr(ABC, -3, 2), substr(ABC, -3, 3), substr(ABC, -3, 4), substr(ABC, -4, 1) FROM ( - select CAST(concat(substr(value, 1, 0), 'ABC') as BINARY) as ABC from src LIMIT 1 + select CAST(concat(substr(value, 1, 0), 'ABC') as BINARY) as ABC from src tablesample (1 rows) ) X; -- test UTF-8 substr @@ -72,4 +74,4 @@ SELECT substr("abc 玩", 5), substr("abc 玩玩玩 abc", 5), substr("abc 玩玩玩 abc", 5, 3) -FROM src LIMIT 1; +FROM src tablesample (1 rows); diff --git a/ql/src/test/queries/clientpositive/udf_tan.q b/ql/src/test/queries/clientpositive/udf_tan.q index f103da9..3980fe8 100644 --- a/ql/src/test/queries/clientpositive/udf_tan.q +++ b/ql/src/test/queries/clientpositive/udf_tan.q @@ -1,16 +1,18 @@ +set hive.fetch.task.conversion=more; + DESCRIBE FUNCTION tan; DESCRIBE FUNCTION EXTENDED tan; SELECT tan(null) -FROM src LIMIT 1; +FROM src tablesample (1 rows); SELECT tan(1), tan(6), tan(-1.0) -FROM src LIMIT 1; +FROM src tablesample (1 rows); DESCRIBE FUNCTION tan; DESCRIBE FUNCTION EXTENDED tan; SELECT tan(null) -FROM src LIMIT 1; +FROM src tablesample (1 rows); SELECT tan(1), tan(6), tan(-1.0) -FROM src LIMIT 1; +FROM src tablesample (1 rows); diff --git a/ql/src/test/queries/clientpositive/udf_testlength.q b/ql/src/test/queries/clientpositive/udf_testlength.q index 322a061..c94a521 100644 --- a/ql/src/test/queries/clientpositive/udf_testlength.q +++ b/ql/src/test/queries/clientpositive/udf_testlength.q @@ -1,12 +1,10 @@ +set hive.fetch.task.conversion=more; + EXPLAIN CREATE TEMPORARY FUNCTION testlength AS 'org.apache.hadoop.hive.ql.udf.UDFTestLength'; CREATE TEMPORARY FUNCTION testlength AS 'org.apache.hadoop.hive.ql.udf.UDFTestLength'; -CREATE TABLE dest1(len INT); - -FROM src INSERT OVERWRITE TABLE dest1 SELECT testlength(src.value); - -SELECT dest1.* FROM dest1; +SELECT testlength(src.value) FROM src; DROP TEMPORARY FUNCTION testlength; diff --git a/ql/src/test/queries/clientpositive/udf_testlength2.q b/ql/src/test/queries/clientpositive/udf_testlength2.q index 6de2709..27e46c2 100644 --- a/ql/src/test/queries/clientpositive/udf_testlength2.q +++ b/ql/src/test/queries/clientpositive/udf_testlength2.q @@ -1,12 +1,10 @@ +set hive.fetch.task.conversion=more; + EXPLAIN CREATE TEMPORARY FUNCTION testlength2 AS 'org.apache.hadoop.hive.ql.udf.UDFTestLength2'; CREATE TEMPORARY FUNCTION testlength2 AS 'org.apache.hadoop.hive.ql.udf.UDFTestLength2'; -CREATE TABLE dest1(len INT); - -FROM src INSERT OVERWRITE TABLE dest1 SELECT testlength2(src.value); - -SELECT dest1.* FROM dest1; +SELECT testlength2(src.value) FROM src; DROP TEMPORARY FUNCTION testlength2; diff --git a/ql/src/test/queries/clientpositive/udf_to_boolean.q 
b/ql/src/test/queries/clientpositive/udf_to_boolean.q index ca23f71..8bea7ab 100644 --- a/ql/src/test/queries/clientpositive/udf_to_boolean.q +++ b/ql/src/test/queries/clientpositive/udf_to_boolean.q @@ -1,44 +1,46 @@ --- 'true' cases: +set hive.fetch.task.conversion=more; -SELECT CAST(CAST(1 AS TINYINT) AS BOOLEAN) FROM src LIMIT 1; -SELECT CAST(CAST(2 AS SMALLINT) AS BOOLEAN) FROM src LIMIT 1; -SELECT CAST(CAST(-4 AS INT) AS BOOLEAN) FROM src LIMIT 1; -SELECT CAST(CAST(-444 AS BIGINT) AS BOOLEAN) FROM src LIMIT 1; +-- 'true' cases: -SELECT CAST(CAST(7.0 AS FLOAT) AS BOOLEAN) FROM src LIMIT 1; -SELECT CAST(CAST(-8.0 AS DOUBLE) AS BOOLEAN) FROM src LIMIT 1; -SELECT CAST(CAST(-99.0 AS DECIMAL) AS BOOLEAN) FROM src LIMIT 1; +SELECT CAST(CAST(1 AS TINYINT) AS BOOLEAN) FROM src tablesample (1 rows); +SELECT CAST(CAST(2 AS SMALLINT) AS BOOLEAN) FROM src tablesample (1 rows); +SELECT CAST(CAST(-4 AS INT) AS BOOLEAN) FROM src tablesample (1 rows); +SELECT CAST(CAST(-444 AS BIGINT) AS BOOLEAN) FROM src tablesample (1 rows); -SELECT CAST(CAST('Foo' AS STRING) AS BOOLEAN) FROM src LIMIT 1; +SELECT CAST(CAST(7.0 AS FLOAT) AS BOOLEAN) FROM src tablesample (1 rows); +SELECT CAST(CAST(-8.0 AS DOUBLE) AS BOOLEAN) FROM src tablesample (1 rows); +SELECT CAST(CAST(-99.0 AS DECIMAL) AS BOOLEAN) FROM src tablesample (1 rows); -SELECT CAST(CAST('2011-05-06 07:08:09' as timestamp) AS BOOLEAN) FROM src LIMIT 1; +SELECT CAST(CAST('Foo' AS STRING) AS BOOLEAN) FROM src tablesample (1 rows); + +SELECT CAST(CAST('2011-05-06 07:08:09' as timestamp) AS BOOLEAN) FROM src tablesample (1 rows); -- 'false' cases: -SELECT CAST(CAST(0 AS TINYINT) AS BOOLEAN) FROM src LIMIT 1; -SELECT CAST(CAST(0 AS SMALLINT) AS BOOLEAN) FROM src LIMIT 1; -SELECT CAST(CAST(0 AS INT) AS BOOLEAN) FROM src LIMIT 1; -SELECT CAST(CAST(0 AS BIGINT) AS BOOLEAN) FROM src LIMIT 1; +SELECT CAST(CAST(0 AS TINYINT) AS BOOLEAN) FROM src tablesample (1 rows); +SELECT CAST(CAST(0 AS SMALLINT) AS BOOLEAN) FROM src tablesample (1 rows); +SELECT CAST(CAST(0 AS INT) AS BOOLEAN) FROM src tablesample (1 rows); +SELECT CAST(CAST(0 AS BIGINT) AS BOOLEAN) FROM src tablesample (1 rows); -SELECT CAST(CAST(0.0 AS FLOAT) AS BOOLEAN) FROM src LIMIT 1; -SELECT CAST(CAST(0.0 AS DOUBLE) AS BOOLEAN) FROM src LIMIT 1; -SELECT CAST(CAST(0.0 AS DECIMAL) AS BOOLEAN) FROM src LIMIT 1; +SELECT CAST(CAST(0.0 AS FLOAT) AS BOOLEAN) FROM src tablesample (1 rows); +SELECT CAST(CAST(0.0 AS DOUBLE) AS BOOLEAN) FROM src tablesample (1 rows); +SELECT CAST(CAST(0.0 AS DECIMAL) AS BOOLEAN) FROM src tablesample (1 rows); -SELECT CAST(CAST('' AS STRING) AS BOOLEAN) FROM src LIMIT 1; +SELECT CAST(CAST('' AS STRING) AS BOOLEAN) FROM src tablesample (1 rows); -SELECT CAST(CAST(0 as timestamp) AS BOOLEAN) FROM src LIMIT 1; +SELECT CAST(CAST(0 as timestamp) AS BOOLEAN) FROM src tablesample (1 rows); -- 'NULL' cases: -SELECT CAST(NULL AS BOOLEAN) FROM src LIMIT 1; +SELECT CAST(NULL AS BOOLEAN) FROM src tablesample (1 rows); -SELECT CAST(CAST(NULL AS TINYINT) AS BOOLEAN) FROM src LIMIT 1; -SELECT CAST(CAST(NULL AS SMALLINT) AS BOOLEAN) FROM src LIMIT 1; -SELECT CAST(CAST(NULL AS INT) AS BOOLEAN) FROM src LIMIT 1; -SELECT CAST(CAST(NULL AS BIGINT) AS BOOLEAN) FROM src LIMIT 1; +SELECT CAST(CAST(NULL AS TINYINT) AS BOOLEAN) FROM src tablesample (1 rows); +SELECT CAST(CAST(NULL AS SMALLINT) AS BOOLEAN) FROM src tablesample (1 rows); +SELECT CAST(CAST(NULL AS INT) AS BOOLEAN) FROM src tablesample (1 rows); +SELECT CAST(CAST(NULL AS BIGINT) AS BOOLEAN) FROM src tablesample (1 rows); -SELECT CAST(CAST(NULL 
AS FLOAT) AS BOOLEAN) FROM src LIMIT 1; -SELECT CAST(CAST(NULL AS DOUBLE) AS BOOLEAN) FROM src LIMIT 1; -SELECT CAST(CAST(NULL AS DECIMAL) AS BOOLEAN) FROM src LIMIT 1; +SELECT CAST(CAST(NULL AS FLOAT) AS BOOLEAN) FROM src tablesample (1 rows); +SELECT CAST(CAST(NULL AS DOUBLE) AS BOOLEAN) FROM src tablesample (1 rows); +SELECT CAST(CAST(NULL AS DECIMAL) AS BOOLEAN) FROM src tablesample (1 rows); -SELECT CAST(CAST(NULL AS STRING) AS BOOLEAN) FROM src LIMIT 1; -SELECT CAST(CAST(NULL as timestamp) AS BOOLEAN) FROM src LIMIT 1; +SELECT CAST(CAST(NULL AS STRING) AS BOOLEAN) FROM src tablesample (1 rows); +SELECT CAST(CAST(NULL as timestamp) AS BOOLEAN) FROM src tablesample (1 rows); diff --git a/ql/src/test/queries/clientpositive/udf_to_byte.q b/ql/src/test/queries/clientpositive/udf_to_byte.q index ded930d..aa0a250 100644 --- a/ql/src/test/queries/clientpositive/udf_to_byte.q +++ b/ql/src/test/queries/clientpositive/udf_to_byte.q @@ -1,15 +1,17 @@ --- Conversion of main primitive types to Byte type: -SELECT CAST(NULL AS TINYINT) FROM src LIMIT 1; +set hive.fetch.task.conversion=more; -SELECT CAST(TRUE AS TINYINT) FROM src LIMIT 1; +-- Conversion of main primitive types to Byte type: +SELECT CAST(NULL AS TINYINT) FROM src tablesample (1 rows); -SELECT CAST(CAST(-18 AS SMALLINT) AS TINYINT) FROM src LIMIT 1; -SELECT CAST(-129 AS TINYINT) FROM src LIMIT 1; -SELECT CAST(CAST(-1025 AS BIGINT) AS TINYINT) FROM src LIMIT 1; +SELECT CAST(TRUE AS TINYINT) FROM src tablesample (1 rows); -SELECT CAST(CAST(-3.14 AS DOUBLE) AS TINYINT) FROM src LIMIT 1; -SELECT CAST(CAST(-3.14 AS FLOAT) AS TINYINT) FROM src LIMIT 1; -SELECT CAST(CAST(-3.14 AS DECIMAL) AS TINYINT) FROM src LIMIT 1; +SELECT CAST(CAST(-18 AS SMALLINT) AS TINYINT) FROM src tablesample (1 rows); +SELECT CAST(-129 AS TINYINT) FROM src tablesample (1 rows); +SELECT CAST(CAST(-1025 AS BIGINT) AS TINYINT) FROM src tablesample (1 rows); -SELECT CAST('-38' AS TINYINT) FROM src LIMIT 1; +SELECT CAST(CAST(-3.14 AS DOUBLE) AS TINYINT) FROM src tablesample (1 rows); +SELECT CAST(CAST(-3.14 AS FLOAT) AS TINYINT) FROM src tablesample (1 rows); +SELECT CAST(CAST(-3.14 AS DECIMAL) AS TINYINT) FROM src tablesample (1 rows); + +SELECT CAST('-38' AS TINYINT) FROM src tablesample (1 rows); diff --git a/ql/src/test/queries/clientpositive/udf_to_double.q b/ql/src/test/queries/clientpositive/udf_to_double.q index e9ae4d9..005ec9d 100644 --- a/ql/src/test/queries/clientpositive/udf_to_double.q +++ b/ql/src/test/queries/clientpositive/udf_to_double.q @@ -1,15 +1,17 @@ --- Conversion of main primitive types to Double type: -SELECT CAST(NULL AS DOUBLE) FROM src LIMIT 1; +set hive.fetch.task.conversion=more; -SELECT CAST(TRUE AS DOUBLE) FROM src LIMIT 1; +-- Conversion of main primitive types to Double type: +SELECT CAST(NULL AS DOUBLE) FROM src tablesample (1 rows); -SELECT CAST(CAST(-7 AS TINYINT) AS DOUBLE) FROM src LIMIT 1; -SELECT CAST(CAST(-18 AS SMALLINT) AS DOUBLE) FROM src LIMIT 1; -SELECT CAST(-129 AS DOUBLE) FROM src LIMIT 1; -SELECT CAST(CAST(-1025 AS BIGINT) AS DOUBLE) FROM src LIMIT 1; +SELECT CAST(TRUE AS DOUBLE) FROM src tablesample (1 rows); -SELECT CAST(CAST(-3.14 AS FLOAT) AS DOUBLE) FROM src LIMIT 1; -SELECT CAST(CAST(-3.14 AS DECIMAL(3,2)) AS DOUBLE) FROM src LIMIT 1; +SELECT CAST(CAST(-7 AS TINYINT) AS DOUBLE) FROM src tablesample (1 rows); +SELECT CAST(CAST(-18 AS SMALLINT) AS DOUBLE) FROM src tablesample (1 rows); +SELECT CAST(-129 AS DOUBLE) FROM src tablesample (1 rows); +SELECT CAST(CAST(-1025 AS BIGINT) AS DOUBLE) FROM src tablesample (1 
rows); -SELECT CAST('-38.14' AS DOUBLE) FROM src LIMIT 1; +SELECT CAST(CAST(-3.14 AS FLOAT) AS DOUBLE) FROM src tablesample (1 rows); +SELECT CAST(CAST(-3.14 AS DECIMAL(3,2)) AS DOUBLE) FROM src tablesample (1 rows); + +SELECT CAST('-38.14' AS DOUBLE) FROM src tablesample (1 rows); diff --git a/ql/src/test/queries/clientpositive/udf_to_float.q b/ql/src/test/queries/clientpositive/udf_to_float.q index efcb0ae..95671f1 100644 --- a/ql/src/test/queries/clientpositive/udf_to_float.q +++ b/ql/src/test/queries/clientpositive/udf_to_float.q @@ -1,15 +1,17 @@ --- Conversion of main primitive types to Float type: -SELECT CAST(NULL AS FLOAT) FROM src LIMIT 1; +set hive.fetch.task.conversion=more; -SELECT CAST(TRUE AS FLOAT) FROM src LIMIT 1; +-- Conversion of main primitive types to Float type: +SELECT CAST(NULL AS FLOAT) FROM src tablesample (1 rows); -SELECT CAST(CAST(-7 AS TINYINT) AS FLOAT) FROM src LIMIT 1; -SELECT CAST(CAST(-18 AS SMALLINT) AS FLOAT) FROM src LIMIT 1; -SELECT CAST(-129 AS FLOAT) FROM src LIMIT 1; -SELECT CAST(CAST(-1025 AS BIGINT) AS FLOAT) FROM src LIMIT 1; +SELECT CAST(TRUE AS FLOAT) FROM src tablesample (1 rows); -SELECT CAST(CAST(-3.14 AS DOUBLE) AS FLOAT) FROM src LIMIT 1; -SELECT CAST(CAST(-3.14 AS DECIMAL(3,2)) AS FLOAT) FROM src LIMIT 1; +SELECT CAST(CAST(-7 AS TINYINT) AS FLOAT) FROM src tablesample (1 rows); +SELECT CAST(CAST(-18 AS SMALLINT) AS FLOAT) FROM src tablesample (1 rows); +SELECT CAST(-129 AS FLOAT) FROM src tablesample (1 rows); +SELECT CAST(CAST(-1025 AS BIGINT) AS FLOAT) FROM src tablesample (1 rows); -SELECT CAST('-38.14' AS FLOAT) FROM src LIMIT 1; +SELECT CAST(CAST(-3.14 AS DOUBLE) AS FLOAT) FROM src tablesample (1 rows); +SELECT CAST(CAST(-3.14 AS DECIMAL(3,2)) AS FLOAT) FROM src tablesample (1 rows); + +SELECT CAST('-38.14' AS FLOAT) FROM src tablesample (1 rows); diff --git a/ql/src/test/queries/clientpositive/udf_to_long.q b/ql/src/test/queries/clientpositive/udf_to_long.q index 45dc6f8..706411a 100644 --- a/ql/src/test/queries/clientpositive/udf_to_long.q +++ b/ql/src/test/queries/clientpositive/udf_to_long.q @@ -1,15 +1,17 @@ --- Conversion of main primitive types to Long type: -SELECT CAST(NULL AS BIGINT) FROM src LIMIT 1; +set hive.fetch.task.conversion=more; -SELECT CAST(TRUE AS BIGINT) FROM src LIMIT 1; +-- Conversion of main primitive types to Long type: +SELECT CAST(NULL AS BIGINT) FROM src tablesample (1 rows); -SELECT CAST(CAST(-7 AS TINYINT) AS BIGINT) FROM src LIMIT 1; -SELECT CAST(CAST(-18 AS SMALLINT) AS BIGINT) FROM src LIMIT 1; -SELECT CAST(-129 AS BIGINT) FROM src LIMIT 1; +SELECT CAST(TRUE AS BIGINT) FROM src tablesample (1 rows); -SELECT CAST(CAST(-3.14 AS DOUBLE) AS BIGINT) FROM src LIMIT 1; -SELECT CAST(CAST(-3.14 AS FLOAT) AS BIGINT) FROM src LIMIT 1; -SELECT CAST(CAST(-3.14 AS DECIMAL) AS BIGINT) FROM src LIMIT 1; +SELECT CAST(CAST(-7 AS TINYINT) AS BIGINT) FROM src tablesample (1 rows); +SELECT CAST(CAST(-18 AS SMALLINT) AS BIGINT) FROM src tablesample (1 rows); +SELECT CAST(-129 AS BIGINT) FROM src tablesample (1 rows); -SELECT CAST('-38' AS BIGINT) FROM src LIMIT 1; +SELECT CAST(CAST(-3.14 AS DOUBLE) AS BIGINT) FROM src tablesample (1 rows); +SELECT CAST(CAST(-3.14 AS FLOAT) AS BIGINT) FROM src tablesample (1 rows); +SELECT CAST(CAST(-3.14 AS DECIMAL) AS BIGINT) FROM src tablesample (1 rows); + +SELECT CAST('-38' AS BIGINT) FROM src tablesample (1 rows); diff --git a/ql/src/test/queries/clientpositive/udf_to_short.q b/ql/src/test/queries/clientpositive/udf_to_short.q index 7d843c1..5cc4e57 100644 --- 
a/ql/src/test/queries/clientpositive/udf_to_short.q +++ b/ql/src/test/queries/clientpositive/udf_to_short.q @@ -1,15 +1,17 @@ --- Conversion of main primitive types to Short type: -SELECT CAST(NULL AS SMALLINT) FROM src LIMIT 1; +set hive.fetch.task.conversion=more; -SELECT CAST(TRUE AS SMALLINT) FROM src LIMIT 1; +-- Conversion of main primitive types to Short type: +SELECT CAST(NULL AS SMALLINT) FROM src tablesample (1 rows); -SELECT CAST(CAST(-18 AS TINYINT) AS SMALLINT) FROM src LIMIT 1; -SELECT CAST(-129 AS SMALLINT) FROM src LIMIT 1; -SELECT CAST(CAST(-1025 AS BIGINT) AS SMALLINT) FROM src LIMIT 1; +SELECT CAST(TRUE AS SMALLINT) FROM src tablesample (1 rows); -SELECT CAST(CAST(-3.14 AS DOUBLE) AS SMALLINT) FROM src LIMIT 1; -SELECT CAST(CAST(-3.14 AS FLOAT) AS SMALLINT) FROM src LIMIT 1; -SELECT CAST(CAST(-3.14 AS DECIMAL) AS SMALLINT) FROM src LIMIT 1; +SELECT CAST(CAST(-18 AS TINYINT) AS SMALLINT) FROM src tablesample (1 rows); +SELECT CAST(-129 AS SMALLINT) FROM src tablesample (1 rows); +SELECT CAST(CAST(-1025 AS BIGINT) AS SMALLINT) FROM src tablesample (1 rows); -SELECT CAST('-38' AS SMALLINT) FROM src LIMIT 1; +SELECT CAST(CAST(-3.14 AS DOUBLE) AS SMALLINT) FROM src tablesample (1 rows); +SELECT CAST(CAST(-3.14 AS FLOAT) AS SMALLINT) FROM src tablesample (1 rows); +SELECT CAST(CAST(-3.14 AS DECIMAL) AS SMALLINT) FROM src tablesample (1 rows); + +SELECT CAST('-38' AS SMALLINT) FROM src tablesample (1 rows); diff --git a/ql/src/test/queries/clientpositive/udf_to_string.q b/ql/src/test/queries/clientpositive/udf_to_string.q index 01ae2d6..ac4b524 100644 --- a/ql/src/test/queries/clientpositive/udf_to_string.q +++ b/ql/src/test/queries/clientpositive/udf_to_string.q @@ -1,16 +1,18 @@ --- Conversion of main primitive types to String type: -SELECT CAST(NULL AS STRING) FROM src LIMIT 1; +set hive.fetch.task.conversion=more; -SELECT CAST(TRUE AS STRING) FROM src LIMIT 1; +-- Conversion of main primitive types to String type: +SELECT CAST(NULL AS STRING) FROM src tablesample (1 rows); -SELECT CAST(CAST(1 AS TINYINT) AS STRING) FROM src LIMIT 1; -SELECT CAST(CAST(-18 AS SMALLINT) AS STRING) FROM src LIMIT 1; -SELECT CAST(-129 AS STRING) FROM src LIMIT 1; -SELECT CAST(CAST(-1025 AS BIGINT) AS STRING) FROM src LIMIT 1; +SELECT CAST(TRUE AS STRING) FROM src tablesample (1 rows); -SELECT CAST(CAST(-3.14 AS DOUBLE) AS STRING) FROM src LIMIT 1; -SELECT CAST(CAST(-3.14 AS FLOAT) AS STRING) FROM src LIMIT 1; -SELECT CAST(CAST(-3.14 AS DECIMAL(3,2)) AS STRING) FROM src LIMIT 1; +SELECT CAST(CAST(1 AS TINYINT) AS STRING) FROM src tablesample (1 rows); +SELECT CAST(CAST(-18 AS SMALLINT) AS STRING) FROM src tablesample (1 rows); +SELECT CAST(-129 AS STRING) FROM src tablesample (1 rows); +SELECT CAST(CAST(-1025 AS BIGINT) AS STRING) FROM src tablesample (1 rows); -SELECT CAST('Foo' AS STRING) FROM src LIMIT 1; +SELECT CAST(CAST(-3.14 AS DOUBLE) AS STRING) FROM src tablesample (1 rows); +SELECT CAST(CAST(-3.14 AS FLOAT) AS STRING) FROM src tablesample (1 rows); +SELECT CAST(CAST(-3.14 AS DECIMAL(3,2)) AS STRING) FROM src tablesample (1 rows); + +SELECT CAST('Foo' AS STRING) FROM src tablesample (1 rows); diff --git a/ql/src/test/queries/clientpositive/udf_to_unix_timestamp.q b/ql/src/test/queries/clientpositive/udf_to_unix_timestamp.q index 3024074..5c71723 100644 --- a/ql/src/test/queries/clientpositive/udf_to_unix_timestamp.q +++ b/ql/src/test/queries/clientpositive/udf_to_unix_timestamp.q @@ -1,3 +1,5 @@ +set hive.fetch.task.conversion=more; + DESCRIBE FUNCTION to_unix_timestamp; DESCRIBE 
FUNCTION EXTENDED to_unix_timestamp; diff --git a/ql/src/test/queries/clientpositive/udf_translate.q b/ql/src/test/queries/clientpositive/udf_translate.q index cba6ff9..21d7998 100644 --- a/ql/src/test/queries/clientpositive/udf_translate.q +++ b/ql/src/test/queries/clientpositive/udf_translate.q @@ -1,3 +1,5 @@ +set hive.fetch.task.conversion=more; + DESCRIBE FUNCTION translate; DESCRIBE FUNCTION EXTENDED translate; @@ -10,28 +12,28 @@ FROM src INSERT OVERWRITE TABLE table_translate SELECT 'abcd', 'ahd', '12' WHERE -- Run some queries on constant input parameters SELECT translate('abcd', 'ab', '12'), - translate('abcd', 'abc', '12') FROM src LIMIT 1; + translate('abcd', 'abc', '12') FROM src tablesample (1 rows); -- Run some queries where first parameter being a table column while the other two being constants SELECT translate(table_input.input, 'ab', '12'), - translate(table_input.input, 'abc', '12') FROM table_input LIMIT 1; + translate(table_input.input, 'abc', '12') FROM table_input tablesample (1 rows); -- Run some queries where all parameters are coming from table columns -SELECT translate(input_string, from_string, to_string) FROM table_translate LIMIT 1; +SELECT translate(input_string, from_string, to_string) FROM table_translate tablesample (1 rows); -- Run some queries where some parameters are NULL SELECT translate(NULL, 'ab', '12'), translate('abcd', NULL, '12'), translate('abcd', 'ab', NULL), - translate(NULL, NULL, NULL) FROM src LIMIT 1; + translate(NULL, NULL, NULL) FROM src tablesample (1 rows); -- Run some queries where the same character appears several times in the from string (2nd argument) of the UDF SELECT translate('abcd', 'aba', '123'), - translate('abcd', 'aba', '12') FROM src LIMIT 1; + translate('abcd', 'aba', '12') FROM src tablesample (1 rows); -- Run some queries for the ignorant case when the 3rd parameter has more characters than the second one -SELECT translate('abcd', 'abc', '1234') FROM src LIMIT 1; +SELECT translate('abcd', 'abc', '1234') FROM src tablesample (1 rows); -- Test proper function over UTF-8 characters -SELECT translate('Àbcd', 'À', 'Ã') FROM src LIMIT 1; +SELECT translate('Àbcd', 'À', 'Ã') FROM src tablesample (1 rows); diff --git a/ql/src/test/queries/clientpositive/udf_unhex.q b/ql/src/test/queries/clientpositive/udf_unhex.q index e80021c..257e469 100644 --- a/ql/src/test/queries/clientpositive/udf_unhex.q +++ b/ql/src/test/queries/clientpositive/udf_unhex.q @@ -1,3 +1,5 @@ +set hive.fetch.task.conversion=more; + DESCRIBE FUNCTION unhex; DESCRIBE FUNCTION EXTENDED unhex; @@ -9,11 +11,11 @@ SELECT unhex('61'), unhex('2D34'), unhex('') -FROM src limit 1; +FROM src tablesample (1 rows); -- Bad inputs SELECT unhex('MySQL'), unhex('G123'), unhex('\0') -FROM src limit 1; +FROM src tablesample (1 rows); diff --git a/ql/src/test/queries/clientpositive/udf_union.q b/ql/src/test/queries/clientpositive/udf_union.q index 9140d22..3876beb 100644 --- a/ql/src/test/queries/clientpositive/udf_union.q +++ b/ql/src/test/queries/clientpositive/udf_union.q @@ -1,11 +1,13 @@ +set hive.fetch.task.conversion=more; + DESCRIBE FUNCTION create_union; DESCRIBE FUNCTION EXTENDED create_union; EXPLAIN SELECT create_union(0, key), create_union(if(key<100, 0, 1), 2.0, value), create_union(1, "a", struct(2, "b")) -FROM src LIMIT 2; +FROM src tablesample (2 rows); SELECT create_union(0, key), create_union(if(key<100, 0, 1), 2.0, value), create_union(1, "a", struct(2, "b")) -FROM src LIMIT 2; +FROM src tablesample (2 rows); diff --git 
a/ql/src/test/queries/clientpositive/udf_unix_timestamp.q b/ql/src/test/queries/clientpositive/udf_unix_timestamp.q index 89288a1..1357a86 100644 --- a/ql/src/test/queries/clientpositive/udf_unix_timestamp.q +++ b/ql/src/test/queries/clientpositive/udf_unix_timestamp.q @@ -1,3 +1,5 @@ +set hive.fetch.task.conversion=more; + DESCRIBE FUNCTION unix_timestamp; DESCRIBE FUNCTION EXTENDED unix_timestamp; diff --git a/ql/src/test/queries/clientpositive/udf_weekofyear.q b/ql/src/test/queries/clientpositive/udf_weekofyear.q index 4b7b4ea..abb0a2d 100644 --- a/ql/src/test/queries/clientpositive/udf_weekofyear.q +++ b/ql/src/test/queries/clientpositive/udf_weekofyear.q @@ -1,6 +1,8 @@ +set hive.fetch.task.conversion=more; + DESCRIBE FUNCTION weekofyear; DESCRIBE FUNCTION EXTENDED weekofyear; SELECT weekofyear('1980-01-01'), weekofyear('1980-01-06'), weekofyear('1980-01-07'), weekofyear('1980-12-31'), weekofyear('1984-1-1'), weekofyear('2008-02-20 00:00:00'), weekofyear('1980-12-28 23:59:59'), weekofyear('1980-12-29 23:59:59') -FROM src LIMIT 1; +FROM src tablesample (1 rows); diff --git a/ql/src/test/queries/clientpositive/udf_when.q b/ql/src/test/queries/clientpositive/udf_when.q index d94a594..ec8c42e 100644 --- a/ql/src/test/queries/clientpositive/udf_when.q +++ b/ql/src/test/queries/clientpositive/udf_when.q @@ -1,3 +1,5 @@ +set hive.fetch.task.conversion=more; + DESCRIBE FUNCTION when; DESCRIBE FUNCTION EXTENDED when; @@ -27,7 +29,7 @@ SELECT CASE WHEN 25=26 THEN 27 WHEN 28=28 THEN NULL END -FROM src LIMIT 1; +FROM src tablesample (1 rows); SELECT CASE WHEN 1=1 THEN 2 @@ -54,4 +56,4 @@ SELECT CASE WHEN 25=26 THEN 27 WHEN 28=28 THEN NULL END -FROM src LIMIT 1; +FROM src tablesample (1 rows); diff --git a/ql/src/test/queries/clientpositive/udf_xpath.q b/ql/src/test/queries/clientpositive/udf_xpath.q index fca1ba1..1ad38ab 100644 --- a/ql/src/test/queries/clientpositive/udf_xpath.q +++ b/ql/src/test/queries/clientpositive/udf_xpath.q @@ -1,8 +1,10 @@ +set hive.fetch.task.conversion=more; + DESCRIBE FUNCTION xpath ; DESCRIBE FUNCTION EXTENDED xpath ; -SELECT xpath ('b1b2b3c1c2', 'a/text()') FROM src LIMIT 1 ; -SELECT xpath ('b1b2b3c1c2', 'a/*/text()') FROM src LIMIT 1 ; -SELECT xpath ('b1b2b3c1c2', 'a/b/text()') FROM src LIMIT 1 ; -SELECT xpath ('b1b2b3c1c2', 'a/c/text()') FROM src LIMIT 1 ; -SELECT xpath ('b1b2b3c1c2', 'a/*[@class="bb"]/text()') FROM src LIMIT 1 ; \ No newline at end of file +SELECT xpath ('b1b2b3c1c2', 'a/text()') FROM src tablesample (1 rows) ; +SELECT xpath ('b1b2b3c1c2', 'a/*/text()') FROM src tablesample (1 rows) ; +SELECT xpath ('b1b2b3c1c2', 'a/b/text()') FROM src tablesample (1 rows) ; +SELECT xpath ('b1b2b3c1c2', 'a/c/text()') FROM src tablesample (1 rows) ; +SELECT xpath ('b1b2b3c1c2', 'a/*[@class="bb"]/text()') FROM src tablesample (1 rows) ; \ No newline at end of file diff --git a/ql/src/test/queries/clientpositive/udf_xpath_boolean.q b/ql/src/test/queries/clientpositive/udf_xpath_boolean.q index 3a6e613..6e3ff24 100644 --- a/ql/src/test/queries/clientpositive/udf_xpath_boolean.q +++ b/ql/src/test/queries/clientpositive/udf_xpath_boolean.q @@ -1,9 +1,11 @@ +set hive.fetch.task.conversion=more; + DESCRIBE FUNCTION xpath_boolean ; DESCRIBE FUNCTION EXTENDED xpath_boolean ; -SELECT xpath_boolean ('b', 'a/b') FROM src LIMIT 1 ; -SELECT xpath_boolean ('b', 'a/c') FROM src LIMIT 1 ; -SELECT xpath_boolean ('b', 'a/b = "b"') FROM src LIMIT 1 ; -SELECT xpath_boolean ('b', 'a/b = "c"') FROM src LIMIT 1 ; -SELECT xpath_boolean ('10', 'a/b < 10') FROM src LIMIT 1 ; -SELECT 
xpath_boolean ('10', 'a/b = 10') FROM src LIMIT 1 ; +SELECT xpath_boolean ('b', 'a/b') FROM src tablesample (1 rows) ; +SELECT xpath_boolean ('b', 'a/c') FROM src tablesample (1 rows) ; +SELECT xpath_boolean ('b', 'a/b = "b"') FROM src tablesample (1 rows) ; +SELECT xpath_boolean ('b', 'a/b = "c"') FROM src tablesample (1 rows) ; +SELECT xpath_boolean ('10', 'a/b < 10') FROM src tablesample (1 rows) ; +SELECT xpath_boolean ('10', 'a/b = 10') FROM src tablesample (1 rows) ; diff --git a/ql/src/test/queries/clientpositive/udf_xpath_double.q b/ql/src/test/queries/clientpositive/udf_xpath_double.q index 4328747..6844176 100644 --- a/ql/src/test/queries/clientpositive/udf_xpath_double.q +++ b/ql/src/test/queries/clientpositive/udf_xpath_double.q @@ -1,14 +1,16 @@ +set hive.fetch.task.conversion=more; + DESCRIBE FUNCTION xpath_number ; DESCRIBE FUNCTION EXTENDED xpath_number ; DESCRIBE FUNCTION xpath_double ; DESCRIBE FUNCTION EXTENDED xpath_double ; -SELECT xpath_double ('this is not a number', 'a') FROM src LIMIT 1 ; -SELECT xpath_double ('this 2 is not a number', 'a') FROM src LIMIT 1 ; -SELECT xpath_double ('200000000040000000000', 'a/b * a/c') FROM src LIMIT 1 ; -SELECT xpath_double ('try a boolean', 'a = 10') FROM src LIMIT 1 ; -SELECT xpath_double ('1248', 'a/b') FROM src LIMIT 1 ; -SELECT xpath_double ('1248', 'sum(a/*)') FROM src LIMIT 1 ; -SELECT xpath_double ('1248', 'sum(a/b)') FROM src LIMIT 1 ; -SELECT xpath_double ('1248', 'sum(a/b[@class="odd"])') FROM src LIMIT 1 ; \ No newline at end of file +SELECT xpath_double ('this is not a number', 'a') FROM src tablesample (1 rows) ; +SELECT xpath_double ('this 2 is not a number', 'a') FROM src tablesample (1 rows) ; +SELECT xpath_double ('200000000040000000000', 'a/b * a/c') FROM src tablesample (1 rows) ; +SELECT xpath_double ('try a boolean', 'a = 10') FROM src tablesample (1 rows) ; +SELECT xpath_double ('1248', 'a/b') FROM src tablesample (1 rows) ; +SELECT xpath_double ('1248', 'sum(a/*)') FROM src tablesample (1 rows) ; +SELECT xpath_double ('1248', 'sum(a/b)') FROM src tablesample (1 rows) ; +SELECT xpath_double ('1248', 'sum(a/b[@class="odd"])') FROM src tablesample (1 rows) ; \ No newline at end of file diff --git a/ql/src/test/queries/clientpositive/udf_xpath_float.q b/ql/src/test/queries/clientpositive/udf_xpath_float.q index 1f14828..4596a32 100644 --- a/ql/src/test/queries/clientpositive/udf_xpath_float.q +++ b/ql/src/test/queries/clientpositive/udf_xpath_float.q @@ -1,11 +1,13 @@ +set hive.fetch.task.conversion=more; + DESCRIBE FUNCTION xpath_float ; DESCRIBE FUNCTION EXTENDED xpath_float ; -SELECT xpath_float ('this is not a number', 'a') FROM src LIMIT 1 ; -SELECT xpath_float ('this 2 is not a number', 'a') FROM src LIMIT 1 ; -SELECT xpath_float ('200000000040000000000', 'a/b * a/c') FROM src LIMIT 1 ; -SELECT xpath_float ('try a boolean', 'a = 10') FROM src LIMIT 1 ; -SELECT xpath_float ('1248', 'a/b') FROM src LIMIT 1 ; -SELECT xpath_float ('1248', 'sum(a/*)') FROM src LIMIT 1 ; -SELECT xpath_float ('1248', 'sum(a/b)') FROM src LIMIT 1 ; -SELECT xpath_float ('1248', 'sum(a/b[@class="odd"])') FROM src LIMIT 1 ; \ No newline at end of file +SELECT xpath_float ('this is not a number', 'a') FROM src tablesample (1 rows) ; +SELECT xpath_float ('this 2 is not a number', 'a') FROM src tablesample (1 rows) ; +SELECT xpath_float ('200000000040000000000', 'a/b * a/c') FROM src tablesample (1 rows) ; +SELECT xpath_float ('try a boolean', 'a = 10') FROM src tablesample (1 rows) ; +SELECT xpath_float ('1248', 'a/b') FROM src 
tablesample (1 rows) ; +SELECT xpath_float ('1248', 'sum(a/*)') FROM src tablesample (1 rows) ; +SELECT xpath_float ('1248', 'sum(a/b)') FROM src tablesample (1 rows) ; +SELECT xpath_float ('1248', 'sum(a/b[@class="odd"])') FROM src tablesample (1 rows) ; \ No newline at end of file diff --git a/ql/src/test/queries/clientpositive/udf_xpath_int.q b/ql/src/test/queries/clientpositive/udf_xpath_int.q index 9b50bd9..9f3898f 100644 --- a/ql/src/test/queries/clientpositive/udf_xpath_int.q +++ b/ql/src/test/queries/clientpositive/udf_xpath_int.q @@ -1,11 +1,13 @@ +set hive.fetch.task.conversion=more; + DESCRIBE FUNCTION xpath_int ; DESCRIBE FUNCTION EXTENDED xpath_int ; -SELECT xpath_int ('this is not a number', 'a') FROM src LIMIT 1 ; -SELECT xpath_int ('this 2 is not a number', 'a') FROM src LIMIT 1 ; -SELECT xpath_int ('200000000040000000000', 'a/b * a/c') FROM src LIMIT 1 ; -SELECT xpath_int ('try a boolean', 'a = 10') FROM src LIMIT 1 ; -SELECT xpath_int ('1248', 'a/b') FROM src LIMIT 1 ; -SELECT xpath_int ('1248', 'sum(a/*)') FROM src LIMIT 1 ; -SELECT xpath_int ('1248', 'sum(a/b)') FROM src LIMIT 1 ; -SELECT xpath_int ('1248', 'sum(a/b[@class="odd"])') FROM src LIMIT 1 ; \ No newline at end of file +SELECT xpath_int ('this is not a number', 'a') FROM src tablesample (1 rows) ; +SELECT xpath_int ('this 2 is not a number', 'a') FROM src tablesample (1 rows) ; +SELECT xpath_int ('200000000040000000000', 'a/b * a/c') FROM src tablesample (1 rows) ; +SELECT xpath_int ('try a boolean', 'a = 10') FROM src tablesample (1 rows) ; +SELECT xpath_int ('1248', 'a/b') FROM src tablesample (1 rows) ; +SELECT xpath_int ('1248', 'sum(a/*)') FROM src tablesample (1 rows) ; +SELECT xpath_int ('1248', 'sum(a/b)') FROM src tablesample (1 rows) ; +SELECT xpath_int ('1248', 'sum(a/b[@class="odd"])') FROM src tablesample (1 rows) ; \ No newline at end of file diff --git a/ql/src/test/queries/clientpositive/udf_xpath_long.q b/ql/src/test/queries/clientpositive/udf_xpath_long.q index 04ee61b..3a33593 100644 --- a/ql/src/test/queries/clientpositive/udf_xpath_long.q +++ b/ql/src/test/queries/clientpositive/udf_xpath_long.q @@ -1,11 +1,13 @@ +set hive.fetch.task.conversion=more; + DESCRIBE FUNCTION xpath_long ; DESCRIBE FUNCTION EXTENDED xpath_long ; -SELECT xpath_long ('this is not a number', 'a') FROM src LIMIT 1 ; -SELECT xpath_long ('this 2 is not a number', 'a') FROM src LIMIT 1 ; -SELECT xpath_long ('200000000040000000000', 'a/b * a/c') FROM src LIMIT 1 ; -SELECT xpath_long ('try a boolean', 'a = 10') FROM src LIMIT 1 ; -SELECT xpath_long ('1248', 'a/b') FROM src LIMIT 1 ; -SELECT xpath_long ('1248', 'sum(a/*)') FROM src LIMIT 1 ; -SELECT xpath_long ('1248', 'sum(a/b)') FROM src LIMIT 1 ; -SELECT xpath_long ('1248', 'sum(a/b[@class="odd"])') FROM src LIMIT 1 ; +SELECT xpath_long ('this is not a number', 'a') FROM src tablesample (1 rows) ; +SELECT xpath_long ('this 2 is not a number', 'a') FROM src tablesample (1 rows) ; +SELECT xpath_long ('200000000040000000000', 'a/b * a/c') FROM src tablesample (1 rows) ; +SELECT xpath_long ('try a boolean', 'a = 10') FROM src tablesample (1 rows) ; +SELECT xpath_long ('1248', 'a/b') FROM src tablesample (1 rows) ; +SELECT xpath_long ('1248', 'sum(a/*)') FROM src tablesample (1 rows) ; +SELECT xpath_long ('1248', 'sum(a/b)') FROM src tablesample (1 rows) ; +SELECT xpath_long ('1248', 'sum(a/b[@class="odd"])') FROM src tablesample (1 rows) ; diff --git a/ql/src/test/queries/clientpositive/udf_xpath_short.q b/ql/src/test/queries/clientpositive/udf_xpath_short.q index 
6a1abdc..073056e 100644 --- a/ql/src/test/queries/clientpositive/udf_xpath_short.q +++ b/ql/src/test/queries/clientpositive/udf_xpath_short.q @@ -1,11 +1,13 @@ +set hive.fetch.task.conversion=more; + DESCRIBE FUNCTION xpath_short ; DESCRIBE FUNCTION EXTENDED xpath_short ; -SELECT xpath_short ('this is not a number', 'a') FROM src LIMIT 1 ; -SELECT xpath_short ('this 2 is not a number', 'a') FROM src LIMIT 1 ; -SELECT xpath_short ('200000000040000000000', 'a/b * a/c') FROM src LIMIT 1 ; -SELECT xpath_short ('try a boolean', 'a = 10') FROM src LIMIT 1 ; -SELECT xpath_short ('1248', 'a/b') FROM src LIMIT 1 ; -SELECT xpath_short ('1248', 'sum(a/*)') FROM src LIMIT 1 ; -SELECT xpath_short ('1248', 'sum(a/b)') FROM src LIMIT 1 ; -SELECT xpath_short ('1248', 'sum(a/b[@class="odd"])') FROM src LIMIT 1 ; \ No newline at end of file +SELECT xpath_short ('this is not a number', 'a') FROM src tablesample (1 rows) ; +SELECT xpath_short ('this 2 is not a number', 'a') FROM src tablesample (1 rows) ; +SELECT xpath_short ('200000000040000000000', 'a/b * a/c') FROM src tablesample (1 rows) ; +SELECT xpath_short ('try a boolean', 'a = 10') FROM src tablesample (1 rows) ; +SELECT xpath_short ('1248', 'a/b') FROM src tablesample (1 rows) ; +SELECT xpath_short ('1248', 'sum(a/*)') FROM src tablesample (1 rows) ; +SELECT xpath_short ('1248', 'sum(a/b)') FROM src tablesample (1 rows) ; +SELECT xpath_short ('1248', 'sum(a/b[@class="odd"])') FROM src tablesample (1 rows) ; \ No newline at end of file diff --git a/ql/src/test/queries/clientpositive/udf_xpath_string.q b/ql/src/test/queries/clientpositive/udf_xpath_string.q index ebbc913..1f1731c 100644 --- a/ql/src/test/queries/clientpositive/udf_xpath_string.q +++ b/ql/src/test/queries/clientpositive/udf_xpath_string.q @@ -1,11 +1,13 @@ +set hive.fetch.task.conversion=more; + DESCRIBE FUNCTION xpath_string ; DESCRIBE FUNCTION EXTENDED xpath_string ; -SELECT xpath_string ('bbcc', 'a') FROM src LIMIT 1 ; -SELECT xpath_string ('bbcc', 'a/b') FROM src LIMIT 1 ; -SELECT xpath_string ('bbcc', 'a/c') FROM src LIMIT 1 ; -SELECT xpath_string ('bbcc', 'a/d') FROM src LIMIT 1 ; -SELECT xpath_string ('b1b2', '//b') FROM src LIMIT 1 ; -SELECT xpath_string ('b1b2', 'a/b[1]') FROM src LIMIT 1 ; -SELECT xpath_string ('b1b2', 'a/b[2]') FROM src LIMIT 1 ; -SELECT xpath_string ('b1b2', 'a/b[@id="b_2"]') FROM src LIMIT 1 ; +SELECT xpath_string ('bbcc', 'a') FROM src tablesample (1 rows) ; +SELECT xpath_string ('bbcc', 'a/b') FROM src tablesample (1 rows) ; +SELECT xpath_string ('bbcc', 'a/c') FROM src tablesample (1 rows) ; +SELECT xpath_string ('bbcc', 'a/d') FROM src tablesample (1 rows) ; +SELECT xpath_string ('b1b2', '//b') FROM src tablesample (1 rows) ; +SELECT xpath_string ('b1b2', 'a/b[1]') FROM src tablesample (1 rows) ; +SELECT xpath_string ('b1b2', 'a/b[2]') FROM src tablesample (1 rows) ; +SELECT xpath_string ('b1b2', 'a/b[@id="b_2"]') FROM src tablesample (1 rows) ; diff --git a/ql/src/test/queries/clientpositive/udtf_explode.q b/ql/src/test/queries/clientpositive/udtf_explode.q index 638a4e9..1d405b3 100644 --- a/ql/src/test/queries/clientpositive/udtf_explode.q +++ b/ql/src/test/queries/clientpositive/udtf_explode.q @@ -1,3 +1,5 @@ +set hive.fetch.task.conversion=more; + DESCRIBE FUNCTION explode; DESCRIBE FUNCTION EXTENDED explode; diff --git a/ql/src/test/queries/clientpositive/udtf_json_tuple.q b/ql/src/test/queries/clientpositive/udtf_json_tuple.q index 712d959..93d829d 100644 --- a/ql/src/test/queries/clientpositive/udtf_json_tuple.q +++ 
b/ql/src/test/queries/clientpositive/udtf_json_tuple.q @@ -2,17 +2,17 @@ create table json_t (key string, jstring string); insert overwrite table json_t select * from ( - select '1', '{"f1": "value1", "f2": "value2", "f3": 3, "f5": 5.23}' from src limit 1 + select '1', '{"f1": "value1", "f2": "value2", "f3": 3, "f5": 5.23}' from src tablesample (1 rows) union all - select '2', '{"f1": "value12", "f3": "value3", "f2": 2, "f4": 4.01}' from src limit 1 + select '2', '{"f1": "value12", "f3": "value3", "f2": 2, "f4": 4.01}' from src tablesample (1 rows) union all - select '3', '{"f1": "value13", "f4": "value44", "f3": "value33", "f2": 2, "f5": 5.01}' from src limit 1 + select '3', '{"f1": "value13", "f4": "value44", "f3": "value33", "f2": 2, "f5": 5.01}' from src tablesample (1 rows) union all - select '4', cast(null as string) from src limit 1 + select '4', cast(null as string) from src tablesample (1 rows) union all - select '5', '{"f1": "", "f5": null}' from src limit 1 + select '5', '{"f1": "", "f5": null}' from src tablesample (1 rows) union all - select '6', '[invalid JSON string]' from src limit 1 + select '6', '[invalid JSON string]' from src tablesample (1 rows) ) s; explain @@ -40,7 +40,7 @@ select f2, count(*) from json_t a lateral view json_tuple(a.jstring, 'f1', 'f2', CREATE TABLE dest1(c1 STRING) STORED AS RCFILE; -INSERT OVERWRITE TABLE dest1 SELECT '{"a":"b\nc"}' FROM src LIMIT 1; +INSERT OVERWRITE TABLE dest1 SELECT '{"a":"b\nc"}' FROM src tablesample (1 rows); SELECT * FROM dest1; diff --git a/ql/src/test/queries/clientpositive/udtf_parse_url_tuple.q b/ql/src/test/queries/clientpositive/udtf_parse_url_tuple.q index 055e39b..0870cbc 100644 --- a/ql/src/test/queries/clientpositive/udtf_parse_url_tuple.q +++ b/ql/src/test/queries/clientpositive/udtf_parse_url_tuple.q @@ -2,17 +2,17 @@ create table url_t (key string, fullurl string); insert overwrite table url_t select * from ( - select '1', 'http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1' from src limit 1 + select '1', 'http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1' from src tablesample (1 rows) union all - select '2', 'https://www.socs.uts.edu.au:80/MosaicDocs-old/url-primer.html?k1=tps#chapter1' from src limit 1 + select '2', 'https://www.socs.uts.edu.au:80/MosaicDocs-old/url-primer.html?k1=tps#chapter1' from src tablesample (1 rows) union all - select '3', 'ftp://sites.google.com/a/example.com/site/page' from src limit 1 + select '3', 'ftp://sites.google.com/a/example.com/site/page' from src tablesample (1 rows) union all - select '4', cast(null as string) from src limit 1 + select '4', cast(null as string) from src tablesample (1 rows) union all - select '5', 'htttp://' from src limit 1 + select '5', 'htttp://' from src tablesample (1 rows) union all - select '6', '[invalid url string]' from src limit 1 + select '6', '[invalid url string]' from src tablesample (1 rows) ) s; describe function parse_url_tuple; diff --git a/ql/src/test/queries/clientpositive/union34.q b/ql/src/test/queries/clientpositive/union34.q index 36bc865..238b583 100644 --- a/ql/src/test/queries/clientpositive/union34.q +++ b/ql/src/test/queries/clientpositive/union34.q @@ -3,7 +3,7 @@ create table src10_2 (key string, value string); create table src10_3 (key string, value string); create table src10_4 (key string, value string); -from (select * from src limit 10) a +from (select * from src tablesample (10 rows)) a insert overwrite table src10_1 select * insert overwrite table src10_2 select * insert overwrite table src10_3 select * diff --git 
a/ql/src/test/queries/clientpositive/varchar_cast.q b/ql/src/test/queries/clientpositive/varchar_cast.q index 550f3dc..c356b1d 100644 --- a/ql/src/test/queries/clientpositive/varchar_cast.q +++ b/ql/src/test/queries/clientpositive/varchar_cast.q @@ -1,3 +1,4 @@ +set hive.fetch.task.conversion=more; -- Cast from varchar to other data types select diff --git a/ql/src/test/queries/clientpositive/varchar_comparison.q b/ql/src/test/queries/clientpositive/varchar_comparison.q index b6c6f40..05cad85 100644 --- a/ql/src/test/queries/clientpositive/varchar_comparison.q +++ b/ql/src/test/queries/clientpositive/varchar_comparison.q @@ -1,3 +1,4 @@ +set hive.fetch.task.conversion=more; -- Should all be true select diff --git a/ql/src/test/results/clientpositive/alter_varchar2.q.out b/ql/src/test/results/clientpositive/alter_varchar2.q.out index bd0e5d6..c9fb199 100644 --- a/ql/src/test/results/clientpositive/alter_varchar2.q.out +++ b/ql/src/test/results/clientpositive/alter_varchar2.q.out @@ -14,12 +14,12 @@ POSTHOOK: query: create table alter_varchar2 ( POSTHOOK: type: CREATETABLE POSTHOOK: Output: default@alter_varchar2 PREHOOK: query: insert overwrite table alter_varchar2 partition (hr=1) - select value from src limit 1 + select value from src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@alter_varchar2@hr=1 POSTHOOK: query: insert overwrite table alter_varchar2 partition (hr=1) - select value from src limit 1 + select value from src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@alter_varchar2@hr=1 @@ -58,12 +58,12 @@ POSTHOOK: Input: default@alter_varchar2@hr=1 POSTHOOK: Lineage: alter_varchar2 PARTITION(hr=1).c1 EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ] 1 val_238 7 PREHOOK: query: insert overwrite table alter_varchar2 partition (hr=2) - select key from src limit 1 + select key from src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@alter_varchar2@hr=2 POSTHOOK: query: insert overwrite table alter_varchar2 partition (hr=2) - select key from src limit 1 + select key from src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@alter_varchar2@hr=2 diff --git a/ql/src/test/results/clientpositive/auto_join_reordering_values.q.out b/ql/src/test/results/clientpositive/auto_join_reordering_values.q.out index 87b79d0..ef5274c 100644 --- a/ql/src/test/results/clientpositive/auto_join_reordering_values.q.out +++ b/ql/src/test/results/clientpositive/auto_join_reordering_values.q.out @@ -22,11 +22,11 @@ PREHOOK: type: CREATETABLE POSTHOOK: query: create table orderpayment_small (`dealid` int,`date` string,`time` string, `cityid` int, `userid` int) POSTHOOK: type: CREATETABLE POSTHOOK: Output: default@orderpayment_small -PREHOOK: query: insert overwrite table orderpayment_small select 748, '2011-03-24', '2011-03-24', 55 ,5372613 from testsrc limit 1 +PREHOOK: query: insert overwrite table orderpayment_small select 748, '2011-03-24', '2011-03-24', 55 ,5372613 from testsrc tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@testsrc PREHOOK: Output: default@orderpayment_small -POSTHOOK: query: insert overwrite table orderpayment_small select 748, '2011-03-24', '2011-03-24', 55 ,5372613 from testsrc limit 1 +POSTHOOK: query: insert overwrite table orderpayment_small select 748, '2011-03-24', '2011-03-24', 55 ,5372613 from testsrc tablesample (1 rows) POSTHOOK: type: QUERY 
POSTHOOK: Input: default@testsrc POSTHOOK: Output: default@orderpayment_small @@ -54,11 +54,11 @@ POSTHOOK: Lineage: orderpayment_small.date SIMPLE [] POSTHOOK: Lineage: orderpayment_small.dealid SIMPLE [] POSTHOOK: Lineage: orderpayment_small.time SIMPLE [] POSTHOOK: Lineage: orderpayment_small.userid SIMPLE [] -PREHOOK: query: insert overwrite table user_small select key from testsrc limit 100 +PREHOOK: query: insert overwrite table user_small select key from testsrc tablesample (100 rows) PREHOOK: type: QUERY PREHOOK: Input: default@testsrc PREHOOK: Output: default@user_small -POSTHOOK: query: insert overwrite table user_small select key from testsrc limit 100 +POSTHOOK: query: insert overwrite table user_small select key from testsrc tablesample (100 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@testsrc POSTHOOK: Output: default@user_small diff --git a/ql/src/test/results/clientpositive/binary_constant.q.out b/ql/src/test/results/clientpositive/binary_constant.q.out index 2b13471..499685a 100644 --- a/ql/src/test/results/clientpositive/binary_constant.q.out +++ b/ql/src/test/results/clientpositive/binary_constant.q.out @@ -1,8 +1,8 @@ -PREHOOK: query: select cast(cast('a' as binary) as string) from src limit 1 +PREHOOK: query: select cast(cast('a' as binary) as string) from src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: select cast(cast('a' as binary) as string) from src limit 1 +POSTHOOK: query: select cast(cast('a' as binary) as string) from src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/cast_to_int.q.out b/ql/src/test/results/clientpositive/cast_to_int.q.out index 04da595..398b13f 100644 --- a/ql/src/test/results/clientpositive/cast_to_int.q.out +++ b/ql/src/test/results/clientpositive/cast_to_int.q.out @@ -27,7 +27,7 @@ select cast('127' as tinyint), cast('1.0a' as int), cast('-1.-1' as int) -from src limit 1 +from src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### @@ -60,7 +60,7 @@ select cast('127' as tinyint), cast('1.0a' as int), cast('-1.-1' as int) -from src limit 1 +from src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/compile_processor.q.out b/ql/src/test/results/clientpositive/compile_processor.q.out index d21fd4e..7e9bb29 100644 --- a/ql/src/test/results/clientpositive/compile_processor.q.out +++ b/ql/src/test/results/clientpositive/compile_processor.q.out @@ -2,11 +2,11 @@ PREHOOK: query: CREATE TEMPORARY FUNCTION Pyth as 'Pyth' PREHOOK: type: CREATEFUNCTION POSTHOOK: query: CREATE TEMPORARY FUNCTION Pyth as 'Pyth' POSTHOOK: type: CREATEFUNCTION -PREHOOK: query: SELECT Pyth(3,4) FROM src limit 1 +PREHOOK: query: SELECT Pyth(3,4) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT Pyth(3,4) FROM src limit 1 +POSTHOOK: query: SELECT Pyth(3,4) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/constant_prop.q.out b/ql/src/test/results/clientpositive/constant_prop.q.out index 76713ce..0b3bb76 100644 --- a/ql/src/test/results/clientpositive/constant_prop.q.out +++ 
b/ql/src/test/results/clientpositive/constant_prop.q.out @@ -7,7 +7,7 @@ SELECT NAMED_STRUCT( IF(ARRAY_CONTAINS(ARRAY(1, 2), 3), "F1", "B1"), 1, IF(ARRAY_CONTAINS(MAP_KEYS(MAP("b", "x")), "b"), "F2", "B2"), 2 ).F2 - FROM src LIMIT 1 + FROM src tablesample (1 rows) PREHOOK: type: QUERY POSTHOOK: query: EXPLAIN SELECT NAMED_STRUCT( @@ -18,41 +18,30 @@ SELECT NAMED_STRUCT( IF(ARRAY_CONTAINS(ARRAY(1, 2), 3), "F1", "B1"), 1, IF(ARRAY_CONTAINS(MAP_KEYS(MAP("b", "x")), "b"), "F2", "B2"), 2 ).F2 - FROM src LIMIT 1 + FROM src tablesample (1 rows) POSTHOOK: type: QUERY ABSTRACT SYNTAX TREE: - (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION NAMED_STRUCT (TOK_FUNCTION IF (TOK_FUNCTION ARRAY_CONTAINS (TOK_FUNCTION ARRAY 1 2) 3) "F1" "B1") 1 (TOK_FUNCTION IF (TOK_FUNCTION ARRAY_CONTAINS (TOK_FUNCTION MAP_KEYS (TOK_FUNCTION MAP "b" "x")) "b") "F2" "B2") 2)) (TOK_SELEXPR (. (TOK_FUNCTION NAMED_STRUCT (TOK_FUNCTION IF (TOK_FUNCTION ARRAY_CONTAINS (TOK_FUNCTION ARRAY 1 2) 3) "F1" "B1") 1 (TOK_FUNCTION IF (TOK_FUNCTION ARRAY_CONTAINS (TOK_FUNCTION MAP_KEYS (TOK_FUNCTION MAP "b" "x")) "b") "F2" "B2") 2) F2))) (TOK_LIMIT 1))) + (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src) (TOK_TABLESPLITSAMPLE TOK_ROWCOUNT 1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION NAMED_STRUCT (TOK_FUNCTION IF (TOK_FUNCTION ARRAY_CONTAINS (TOK_FUNCTION ARRAY 1 2) 3) "F1" "B1") 1 (TOK_FUNCTION IF (TOK_FUNCTION ARRAY_CONTAINS (TOK_FUNCTION MAP_KEYS (TOK_FUNCTION MAP "b" "x")) "b") "F2" "B2") 2)) (TOK_SELEXPR (. (TOK_FUNCTION NAMED_STRUCT (TOK_FUNCTION IF (TOK_FUNCTION ARRAY_CONTAINS (TOK_FUNCTION ARRAY 1 2) 3) "F1" "B1") 1 (TOK_FUNCTION IF (TOK_FUNCTION ARRAY_CONTAINS (TOK_FUNCTION MAP_KEYS (TOK_FUNCTION MAP "b" "x")) "b") "F2" "B2") 2) F2))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - src - TableScan - alias: src - Select Operator - expressions: - expr: named_struct(if(array_contains(array(1,2), 3), 'F1', 'B1'),1,if(array_contains(map_keys(map('b':'x')), 'b'), 'F2', 'B2'),2) - type: struct - expr: named_struct(if(array_contains(array(1,2), 3), 'F1', 'B1'),1,if(array_contains(map_keys(map('b':'x')), 'b'), 'F2', 'B2'),2).F2 - type: int - outputColumnNames: _col0, _col1 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator - limit: 1 + limit: -1 + Processor Tree: + TableScan + alias: src + Row Limit Per Split: 1 + Select Operator + expressions: + expr: named_struct(if(array_contains(array(1,2), 3), 'F1', 'B1'),1,if(array_contains(map_keys(map('b':'x')), 'b'), 'F2', 'B2'),2) + type: struct + expr: named_struct(if(array_contains(array(1,2), 3), 'F1', 'B1'),1,if(array_contains(map_keys(map('b':'x')), 'b'), 'F2', 'B2'),2).F2 + type: int + outputColumnNames: _col0, _col1 + ListSink PREHOOK: query: SELECT NAMED_STRUCT( @@ -63,7 +52,7 @@ PREHOOK: query: SELECT NAMED_STRUCT( IF(ARRAY_CONTAINS(ARRAY(1, 2), 3), "F1", "B1"), 1, IF(ARRAY_CONTAINS(MAP_KEYS(MAP("b", "x")), "b"), "F2", "B2"), 2 ).F2 - FROM src LIMIT 1 + FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### @@ -75,7 +64,7 @@ POSTHOOK: 
query: SELECT NAMED_STRUCT( IF(ARRAY_CONTAINS(ARRAY(1, 2), 3), "F1", "B1"), 1, IF(ARRAY_CONTAINS(MAP_KEYS(MAP("b", "x")), "b"), "F2", "B2"), 2 ).F2 - FROM src LIMIT 1 + FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/date_1.q.out b/ql/src/test/results/clientpositive/date_1.q.out index 354681b..d74abad 100644 --- a/ql/src/test/results/clientpositive/date_1.q.out +++ b/ql/src/test/results/clientpositive/date_1.q.out @@ -8,12 +8,12 @@ POSTHOOK: query: create table date_1 (d date) POSTHOOK: type: CREATETABLE POSTHOOK: Output: default@date_1 PREHOOK: query: insert overwrite table date_1 - select cast('2011-01-01' as date) from src limit 1 + select cast('2011-01-01' as date) from src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@date_1 POSTHOOK: query: insert overwrite table date_1 - select cast('2011-01-01' as date) from src limit 1 + select cast('2011-01-01' as date) from src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@date_1 @@ -39,12 +39,12 @@ POSTHOOK: Input: default@date_1 POSTHOOK: Lineage: date_1.d EXPRESSION [] 2011-01-01 1 PREHOOK: query: insert overwrite table date_1 - select date '2011-01-01' from src limit 1 + select date '2011-01-01' from src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@date_1 POSTHOOK: query: insert overwrite table date_1 - select date '2011-01-01' from src limit 1 + select date '2011-01-01' from src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@date_1 @@ -73,12 +73,12 @@ POSTHOOK: Lineage: date_1.d EXPRESSION [] POSTHOOK: Lineage: date_1.d SIMPLE [] 2011-01-01 1 PREHOOK: query: insert overwrite table date_1 - select cast(cast('2011-01-01 00:00:00' as timestamp) as date) from src limit 1 + select cast(cast('2011-01-01 00:00:00' as timestamp) as date) from src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@date_1 POSTHOOK: query: insert overwrite table date_1 - select cast(cast('2011-01-01 00:00:00' as timestamp) as date) from src limit 1 + select cast(cast('2011-01-01 00:00:00' as timestamp) as date) from src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@date_1 diff --git a/ql/src/test/results/clientpositive/date_3.q.out b/ql/src/test/results/clientpositive/date_3.q.out index 230539e..2d06b19 100644 --- a/ql/src/test/results/clientpositive/date_3.q.out +++ b/ql/src/test/results/clientpositive/date_3.q.out @@ -20,12 +20,12 @@ POSTHOOK: type: ALTERTABLE_ADDCOLS POSTHOOK: Input: default@date_3 POSTHOOK: Output: default@date_3 PREHOOK: query: insert overwrite table date_3 - select 1, cast(cast('2011-01-01 00:00:00' as timestamp) as date) from src limit 1 + select 1, cast(cast('2011-01-01 00:00:00' as timestamp) as date) from src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@date_3 POSTHOOK: query: insert overwrite table date_3 - select 1, cast(cast('2011-01-01 00:00:00' as timestamp) as date) from src limit 1 + select 1, cast(cast('2011-01-01 00:00:00' as timestamp) as date) from src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@date_3 diff --git a/ql/src/test/results/clientpositive/date_4.q.out b/ql/src/test/results/clientpositive/date_4.q.out index 
a97d522..25adf52 100644 --- a/ql/src/test/results/clientpositive/date_4.q.out +++ b/ql/src/test/results/clientpositive/date_4.q.out @@ -17,13 +17,13 @@ POSTHOOK: Input: default@date_4 POSTHOOK: Output: default@date_4 PREHOOK: query: -- Test date literal syntax insert overwrite table date_4 - select date '2011-01-01' from src limit 1 + select date '2011-01-01' from src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@date_4 POSTHOOK: query: -- Test date literal syntax insert overwrite table date_4 - select date '2011-01-01' from src limit 1 + select date '2011-01-01' from src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@date_4 diff --git a/ql/src/test/results/clientpositive/decimal_1.q.out b/ql/src/test/results/clientpositive/decimal_1.q.out index c0a4348..26cc4e2 100644 --- a/ql/src/test/results/clientpositive/decimal_1.q.out +++ b/ql/src/test/results/clientpositive/decimal_1.q.out @@ -16,101 +16,101 @@ POSTHOOK: type: ALTERTABLE_SERIALIZER POSTHOOK: Input: default@decimal_1 POSTHOOK: Output: default@decimal_1 PREHOOK: query: insert overwrite table decimal_1 - select cast('17.29' as decimal(4,2)) from src limit 1 + select cast('17.29' as decimal(4,2)) from src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@decimal_1 POSTHOOK: query: insert overwrite table decimal_1 - select cast('17.29' as decimal(4,2)) from src limit 1 + select cast('17.29' as decimal(4,2)) from src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@decimal_1 POSTHOOK: Lineage: decimal_1.t EXPRESSION [] -PREHOOK: query: select cast(t as boolean) from decimal_1 limit 1 +PREHOOK: query: select cast(t as boolean) from decimal_1 PREHOOK: type: QUERY PREHOOK: Input: default@decimal_1 #### A masked pattern was here #### -POSTHOOK: query: select cast(t as boolean) from decimal_1 limit 1 +POSTHOOK: query: select cast(t as boolean) from decimal_1 POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_1 #### A masked pattern was here #### POSTHOOK: Lineage: decimal_1.t EXPRESSION [] true -PREHOOK: query: select cast(t as tinyint) from decimal_1 limit 1 +PREHOOK: query: select cast(t as tinyint) from decimal_1 PREHOOK: type: QUERY PREHOOK: Input: default@decimal_1 #### A masked pattern was here #### -POSTHOOK: query: select cast(t as tinyint) from decimal_1 limit 1 +POSTHOOK: query: select cast(t as tinyint) from decimal_1 POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_1 #### A masked pattern was here #### POSTHOOK: Lineage: decimal_1.t EXPRESSION [] 17 -PREHOOK: query: select cast(t as smallint) from decimal_1 limit 1 +PREHOOK: query: select cast(t as smallint) from decimal_1 PREHOOK: type: QUERY PREHOOK: Input: default@decimal_1 #### A masked pattern was here #### -POSTHOOK: query: select cast(t as smallint) from decimal_1 limit 1 +POSTHOOK: query: select cast(t as smallint) from decimal_1 POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_1 #### A masked pattern was here #### POSTHOOK: Lineage: decimal_1.t EXPRESSION [] 17 -PREHOOK: query: select cast(t as int) from decimal_1 limit 1 +PREHOOK: query: select cast(t as int) from decimal_1 PREHOOK: type: QUERY PREHOOK: Input: default@decimal_1 #### A masked pattern was here #### -POSTHOOK: query: select cast(t as int) from decimal_1 limit 1 +POSTHOOK: query: select cast(t as int) from decimal_1 POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_1 #### A masked pattern was here 
#### POSTHOOK: Lineage: decimal_1.t EXPRESSION [] 17 -PREHOOK: query: select cast(t as bigint) from decimal_1 limit 1 +PREHOOK: query: select cast(t as bigint) from decimal_1 PREHOOK: type: QUERY PREHOOK: Input: default@decimal_1 #### A masked pattern was here #### -POSTHOOK: query: select cast(t as bigint) from decimal_1 limit 1 +POSTHOOK: query: select cast(t as bigint) from decimal_1 POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_1 #### A masked pattern was here #### POSTHOOK: Lineage: decimal_1.t EXPRESSION [] 17 -PREHOOK: query: select cast(t as float) from decimal_1 limit 1 +PREHOOK: query: select cast(t as float) from decimal_1 PREHOOK: type: QUERY PREHOOK: Input: default@decimal_1 #### A masked pattern was here #### -POSTHOOK: query: select cast(t as float) from decimal_1 limit 1 +POSTHOOK: query: select cast(t as float) from decimal_1 POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_1 #### A masked pattern was here #### POSTHOOK: Lineage: decimal_1.t EXPRESSION [] 17.29 -PREHOOK: query: select cast(t as double) from decimal_1 limit 1 +PREHOOK: query: select cast(t as double) from decimal_1 PREHOOK: type: QUERY PREHOOK: Input: default@decimal_1 #### A masked pattern was here #### -POSTHOOK: query: select cast(t as double) from decimal_1 limit 1 +POSTHOOK: query: select cast(t as double) from decimal_1 POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_1 #### A masked pattern was here #### POSTHOOK: Lineage: decimal_1.t EXPRESSION [] 17.29 -PREHOOK: query: select cast(t as string) from decimal_1 limit 1 +PREHOOK: query: select cast(t as string) from decimal_1 PREHOOK: type: QUERY PREHOOK: Input: default@decimal_1 #### A masked pattern was here #### -POSTHOOK: query: select cast(t as string) from decimal_1 limit 1 +POSTHOOK: query: select cast(t as string) from decimal_1 POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_1 #### A masked pattern was here #### POSTHOOK: Lineage: decimal_1.t EXPRESSION [] 17.29 -PREHOOK: query: select cast(t as timestamp) from decimal_1 limit 1 +PREHOOK: query: select cast(t as timestamp) from decimal_1 PREHOOK: type: QUERY PREHOOK: Input: default@decimal_1 #### A masked pattern was here #### -POSTHOOK: query: select cast(t as timestamp) from decimal_1 limit 1 +POSTHOOK: query: select cast(t as timestamp) from decimal_1 POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_1 #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/decimal_2.q.out b/ql/src/test/results/clientpositive/decimal_2.q.out index 0c20c61..28ee59d 100644 --- a/ql/src/test/results/clientpositive/decimal_2.q.out +++ b/ql/src/test/results/clientpositive/decimal_2.q.out @@ -16,300 +16,300 @@ POSTHOOK: type: ALTERTABLE_SERIALIZER POSTHOOK: Input: default@decimal_2 POSTHOOK: Output: default@decimal_2 PREHOOK: query: insert overwrite table decimal_2 - select cast('17.29' as decimal(4,2)) from src limit 1 + select cast('17.29' as decimal(4,2)) from src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@decimal_2 POSTHOOK: query: insert overwrite table decimal_2 - select cast('17.29' as decimal(4,2)) from src limit 1 + select cast('17.29' as decimal(4,2)) from src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@decimal_2 POSTHOOK: Lineage: decimal_2.t EXPRESSION [] -PREHOOK: query: select cast(t as boolean) from decimal_2 limit 1 +PREHOOK: query: select cast(t as boolean) from decimal_2 PREHOOK: type: QUERY PREHOOK: Input: default@decimal_2 #### A 
masked pattern was here #### -POSTHOOK: query: select cast(t as boolean) from decimal_2 limit 1 +POSTHOOK: query: select cast(t as boolean) from decimal_2 POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_2 #### A masked pattern was here #### POSTHOOK: Lineage: decimal_2.t EXPRESSION [] true -PREHOOK: query: select cast(t as tinyint) from decimal_2 limit 1 +PREHOOK: query: select cast(t as tinyint) from decimal_2 PREHOOK: type: QUERY PREHOOK: Input: default@decimal_2 #### A masked pattern was here #### -POSTHOOK: query: select cast(t as tinyint) from decimal_2 limit 1 +POSTHOOK: query: select cast(t as tinyint) from decimal_2 POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_2 #### A masked pattern was here #### POSTHOOK: Lineage: decimal_2.t EXPRESSION [] 17 -PREHOOK: query: select cast(t as smallint) from decimal_2 limit 1 +PREHOOK: query: select cast(t as smallint) from decimal_2 PREHOOK: type: QUERY PREHOOK: Input: default@decimal_2 #### A masked pattern was here #### -POSTHOOK: query: select cast(t as smallint) from decimal_2 limit 1 +POSTHOOK: query: select cast(t as smallint) from decimal_2 POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_2 #### A masked pattern was here #### POSTHOOK: Lineage: decimal_2.t EXPRESSION [] 17 -PREHOOK: query: select cast(t as int) from decimal_2 limit 1 +PREHOOK: query: select cast(t as int) from decimal_2 PREHOOK: type: QUERY PREHOOK: Input: default@decimal_2 #### A masked pattern was here #### -POSTHOOK: query: select cast(t as int) from decimal_2 limit 1 +POSTHOOK: query: select cast(t as int) from decimal_2 POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_2 #### A masked pattern was here #### POSTHOOK: Lineage: decimal_2.t EXPRESSION [] 17 -PREHOOK: query: select cast(t as bigint) from decimal_2 limit 1 +PREHOOK: query: select cast(t as bigint) from decimal_2 PREHOOK: type: QUERY PREHOOK: Input: default@decimal_2 #### A masked pattern was here #### -POSTHOOK: query: select cast(t as bigint) from decimal_2 limit 1 +POSTHOOK: query: select cast(t as bigint) from decimal_2 POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_2 #### A masked pattern was here #### POSTHOOK: Lineage: decimal_2.t EXPRESSION [] 17 -PREHOOK: query: select cast(t as float) from decimal_2 limit 1 +PREHOOK: query: select cast(t as float) from decimal_2 PREHOOK: type: QUERY PREHOOK: Input: default@decimal_2 #### A masked pattern was here #### -POSTHOOK: query: select cast(t as float) from decimal_2 limit 1 +POSTHOOK: query: select cast(t as float) from decimal_2 POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_2 #### A masked pattern was here #### POSTHOOK: Lineage: decimal_2.t EXPRESSION [] 17.29 -PREHOOK: query: select cast(t as double) from decimal_2 limit 1 +PREHOOK: query: select cast(t as double) from decimal_2 PREHOOK: type: QUERY PREHOOK: Input: default@decimal_2 #### A masked pattern was here #### -POSTHOOK: query: select cast(t as double) from decimal_2 limit 1 +POSTHOOK: query: select cast(t as double) from decimal_2 POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_2 #### A masked pattern was here #### POSTHOOK: Lineage: decimal_2.t EXPRESSION [] 17.29 -PREHOOK: query: select cast(t as string) from decimal_2 limit 1 +PREHOOK: query: select cast(t as string) from decimal_2 PREHOOK: type: QUERY PREHOOK: Input: default@decimal_2 #### A masked pattern was here #### -POSTHOOK: query: select cast(t as string) from decimal_2 limit 1 +POSTHOOK: query: select cast(t as string) from decimal_2 POSTHOOK: type: QUERY POSTHOOK: Input: 
default@decimal_2 #### A masked pattern was here #### POSTHOOK: Lineage: decimal_2.t EXPRESSION [] 17.29 PREHOOK: query: insert overwrite table decimal_2 - select cast('3404045.5044003' as decimal(18,9)) from src limit 1 + select cast('3404045.5044003' as decimal(18,9)) from src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@decimal_2 POSTHOOK: query: insert overwrite table decimal_2 - select cast('3404045.5044003' as decimal(18,9)) from src limit 1 + select cast('3404045.5044003' as decimal(18,9)) from src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@decimal_2 POSTHOOK: Lineage: decimal_2.t EXPRESSION [] POSTHOOK: Lineage: decimal_2.t EXPRESSION [] -PREHOOK: query: select cast(t as boolean) from decimal_2 limit 1 +PREHOOK: query: select cast(t as boolean) from decimal_2 PREHOOK: type: QUERY PREHOOK: Input: default@decimal_2 #### A masked pattern was here #### -POSTHOOK: query: select cast(t as boolean) from decimal_2 limit 1 +POSTHOOK: query: select cast(t as boolean) from decimal_2 POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_2 #### A masked pattern was here #### POSTHOOK: Lineage: decimal_2.t EXPRESSION [] POSTHOOK: Lineage: decimal_2.t EXPRESSION [] true -PREHOOK: query: select cast(t as tinyint) from decimal_2 limit 1 +PREHOOK: query: select cast(t as tinyint) from decimal_2 PREHOOK: type: QUERY PREHOOK: Input: default@decimal_2 #### A masked pattern was here #### -POSTHOOK: query: select cast(t as tinyint) from decimal_2 limit 1 +POSTHOOK: query: select cast(t as tinyint) from decimal_2 POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_2 #### A masked pattern was here #### POSTHOOK: Lineage: decimal_2.t EXPRESSION [] POSTHOOK: Lineage: decimal_2.t EXPRESSION [] 13 -PREHOOK: query: select cast(t as smallint) from decimal_2 limit 1 +PREHOOK: query: select cast(t as smallint) from decimal_2 PREHOOK: type: QUERY PREHOOK: Input: default@decimal_2 #### A masked pattern was here #### -POSTHOOK: query: select cast(t as smallint) from decimal_2 limit 1 +POSTHOOK: query: select cast(t as smallint) from decimal_2 POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_2 #### A masked pattern was here #### POSTHOOK: Lineage: decimal_2.t EXPRESSION [] POSTHOOK: Lineage: decimal_2.t EXPRESSION [] -3827 -PREHOOK: query: select cast(t as int) from decimal_2 limit 1 +PREHOOK: query: select cast(t as int) from decimal_2 PREHOOK: type: QUERY PREHOOK: Input: default@decimal_2 #### A masked pattern was here #### -POSTHOOK: query: select cast(t as int) from decimal_2 limit 1 +POSTHOOK: query: select cast(t as int) from decimal_2 POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_2 #### A masked pattern was here #### POSTHOOK: Lineage: decimal_2.t EXPRESSION [] POSTHOOK: Lineage: decimal_2.t EXPRESSION [] 3404045 -PREHOOK: query: select cast(t as bigint) from decimal_2 limit 1 +PREHOOK: query: select cast(t as bigint) from decimal_2 PREHOOK: type: QUERY PREHOOK: Input: default@decimal_2 #### A masked pattern was here #### -POSTHOOK: query: select cast(t as bigint) from decimal_2 limit 1 +POSTHOOK: query: select cast(t as bigint) from decimal_2 POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_2 #### A masked pattern was here #### POSTHOOK: Lineage: decimal_2.t EXPRESSION [] POSTHOOK: Lineage: decimal_2.t EXPRESSION [] 3404045 -PREHOOK: query: select cast(t as float) from decimal_2 limit 1 +PREHOOK: query: select cast(t as float) from decimal_2 PREHOOK: type: QUERY PREHOOK: Input: 
default@decimal_2 #### A masked pattern was here #### -POSTHOOK: query: select cast(t as float) from decimal_2 limit 1 +POSTHOOK: query: select cast(t as float) from decimal_2 POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_2 #### A masked pattern was here #### POSTHOOK: Lineage: decimal_2.t EXPRESSION [] POSTHOOK: Lineage: decimal_2.t EXPRESSION [] 3404045.5 -PREHOOK: query: select cast(t as double) from decimal_2 limit 1 +PREHOOK: query: select cast(t as double) from decimal_2 PREHOOK: type: QUERY PREHOOK: Input: default@decimal_2 #### A masked pattern was here #### -POSTHOOK: query: select cast(t as double) from decimal_2 limit 1 +POSTHOOK: query: select cast(t as double) from decimal_2 POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_2 #### A masked pattern was here #### POSTHOOK: Lineage: decimal_2.t EXPRESSION [] POSTHOOK: Lineage: decimal_2.t EXPRESSION [] 3404045.5044003 -PREHOOK: query: select cast(t as string) from decimal_2 limit 1 +PREHOOK: query: select cast(t as string) from decimal_2 PREHOOK: type: QUERY PREHOOK: Input: default@decimal_2 #### A masked pattern was here #### -POSTHOOK: query: select cast(t as string) from decimal_2 limit 1 +POSTHOOK: query: select cast(t as string) from decimal_2 POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_2 #### A masked pattern was here #### POSTHOOK: Lineage: decimal_2.t EXPRESSION [] POSTHOOK: Lineage: decimal_2.t EXPRESSION [] 3404045.5044003 -PREHOOK: query: select cast(3.14 as decimal(4,2)) from decimal_2 limit 1 +PREHOOK: query: select cast(3.14 as decimal(4,2)) from decimal_2 PREHOOK: type: QUERY PREHOOK: Input: default@decimal_2 #### A masked pattern was here #### -POSTHOOK: query: select cast(3.14 as decimal(4,2)) from decimal_2 limit 1 +POSTHOOK: query: select cast(3.14 as decimal(4,2)) from decimal_2 POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_2 #### A masked pattern was here #### POSTHOOK: Lineage: decimal_2.t EXPRESSION [] POSTHOOK: Lineage: decimal_2.t EXPRESSION [] 3.14 -PREHOOK: query: select cast(cast(3.14 as float) as decimal(4,2)) from decimal_2 limit 1 +PREHOOK: query: select cast(cast(3.14 as float) as decimal(4,2)) from decimal_2 PREHOOK: type: QUERY PREHOOK: Input: default@decimal_2 #### A masked pattern was here #### -POSTHOOK: query: select cast(cast(3.14 as float) as decimal(4,2)) from decimal_2 limit 1 +POSTHOOK: query: select cast(cast(3.14 as float) as decimal(4,2)) from decimal_2 POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_2 #### A masked pattern was here #### POSTHOOK: Lineage: decimal_2.t EXPRESSION [] POSTHOOK: Lineage: decimal_2.t EXPRESSION [] 3.14 -PREHOOK: query: select cast(cast('2012-12-19 11:12:19.1234567' as timestamp) as decimal(30,8)) from decimal_2 limit 1 +PREHOOK: query: select cast(cast('2012-12-19 11:12:19.1234567' as timestamp) as decimal(30,8)) from decimal_2 PREHOOK: type: QUERY PREHOOK: Input: default@decimal_2 #### A masked pattern was here #### -POSTHOOK: query: select cast(cast('2012-12-19 11:12:19.1234567' as timestamp) as decimal(30,8)) from decimal_2 limit 1 +POSTHOOK: query: select cast(cast('2012-12-19 11:12:19.1234567' as timestamp) as decimal(30,8)) from decimal_2 POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_2 #### A masked pattern was here #### POSTHOOK: Lineage: decimal_2.t EXPRESSION [] POSTHOOK: Lineage: decimal_2.t EXPRESSION [] 1355944339.1234567 -PREHOOK: query: select cast(true as decimal) from decimal_2 limit 1 +PREHOOK: query: select cast(true as decimal) from decimal_2 PREHOOK: type: QUERY PREHOOK: Input: 
default@decimal_2 #### A masked pattern was here #### -POSTHOOK: query: select cast(true as decimal) from decimal_2 limit 1 +POSTHOOK: query: select cast(true as decimal) from decimal_2 POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_2 #### A masked pattern was here #### POSTHOOK: Lineage: decimal_2.t EXPRESSION [] POSTHOOK: Lineage: decimal_2.t EXPRESSION [] 1 -PREHOOK: query: select cast(3Y as decimal) from decimal_2 limit 1 +PREHOOK: query: select cast(3Y as decimal) from decimal_2 PREHOOK: type: QUERY PREHOOK: Input: default@decimal_2 #### A masked pattern was here #### -POSTHOOK: query: select cast(3Y as decimal) from decimal_2 limit 1 +POSTHOOK: query: select cast(3Y as decimal) from decimal_2 POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_2 #### A masked pattern was here #### POSTHOOK: Lineage: decimal_2.t EXPRESSION [] POSTHOOK: Lineage: decimal_2.t EXPRESSION [] 3 -PREHOOK: query: select cast(3S as decimal) from decimal_2 limit 1 +PREHOOK: query: select cast(3S as decimal) from decimal_2 PREHOOK: type: QUERY PREHOOK: Input: default@decimal_2 #### A masked pattern was here #### -POSTHOOK: query: select cast(3S as decimal) from decimal_2 limit 1 +POSTHOOK: query: select cast(3S as decimal) from decimal_2 POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_2 #### A masked pattern was here #### POSTHOOK: Lineage: decimal_2.t EXPRESSION [] POSTHOOK: Lineage: decimal_2.t EXPRESSION [] 3 -PREHOOK: query: select cast(cast(3 as int) as decimal) from decimal_2 limit 1 +PREHOOK: query: select cast(cast(3 as int) as decimal) from decimal_2 PREHOOK: type: QUERY PREHOOK: Input: default@decimal_2 #### A masked pattern was here #### -POSTHOOK: query: select cast(cast(3 as int) as decimal) from decimal_2 limit 1 +POSTHOOK: query: select cast(cast(3 as int) as decimal) from decimal_2 POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_2 #### A masked pattern was here #### POSTHOOK: Lineage: decimal_2.t EXPRESSION [] POSTHOOK: Lineage: decimal_2.t EXPRESSION [] 3 -PREHOOK: query: select cast(3L as decimal) from decimal_2 limit 1 +PREHOOK: query: select cast(3L as decimal) from decimal_2 PREHOOK: type: QUERY PREHOOK: Input: default@decimal_2 #### A masked pattern was here #### -POSTHOOK: query: select cast(3L as decimal) from decimal_2 limit 1 +POSTHOOK: query: select cast(3L as decimal) from decimal_2 POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_2 #### A masked pattern was here #### POSTHOOK: Lineage: decimal_2.t EXPRESSION [] POSTHOOK: Lineage: decimal_2.t EXPRESSION [] 3 -PREHOOK: query: select cast(0.99999999999999999999 as decimal(20,19)) from decimal_2 limit 1 +PREHOOK: query: select cast(0.99999999999999999999 as decimal(20,19)) from decimal_2 PREHOOK: type: QUERY PREHOOK: Input: default@decimal_2 #### A masked pattern was here #### -POSTHOOK: query: select cast(0.99999999999999999999 as decimal(20,19)) from decimal_2 limit 1 +POSTHOOK: query: select cast(0.99999999999999999999 as decimal(20,19)) from decimal_2 POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_2 #### A masked pattern was here #### POSTHOOK: Lineage: decimal_2.t EXPRESSION [] POSTHOOK: Lineage: decimal_2.t EXPRESSION [] 1 -PREHOOK: query: select cast('0.99999999999999999999' as decimal(20,20)) from decimal_2 limit 1 +PREHOOK: query: select cast('0.99999999999999999999' as decimal(20,20)) from decimal_2 PREHOOK: type: QUERY PREHOOK: Input: default@decimal_2 #### A masked pattern was here #### -POSTHOOK: query: select cast('0.99999999999999999999' as decimal(20,20)) from decimal_2 limit 1 
+POSTHOOK: query: select cast('0.99999999999999999999' as decimal(20,20)) from decimal_2 POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_2 #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/decimal_udf.q.out b/ql/src/test/results/clientpositive/decimal_udf.q.out index 652d1a8..879ab29 100644 --- a/ql/src/test/results/clientpositive/decimal_udf.q.out +++ b/ql/src/test/results/clientpositive/decimal_udf.q.out @@ -29,32 +29,21 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (+ (TOK_TABLE_OR_COL key) (TOK_TABLE_OR_COL key)))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - decimal_udf - TableScan - alias: decimal_udf - Select Operator - expressions: - expr: (key + key) - type: decimal(65,30) - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: -1 + Processor Tree: + TableScan + alias: decimal_udf + Select Operator + expressions: + expr: (key + key) + type: decimal(65,30) + outputColumnNames: _col0 + ListSink PREHOOK: query: SELECT key + key FROM DECIMAL_UDF @@ -111,32 +100,21 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (+ (TOK_TABLE_OR_COL key) (TOK_TABLE_OR_COL value)))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - decimal_udf - TableScan - alias: decimal_udf - Select Operator - expressions: - expr: (key + value) - type: decimal(65,30) - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: -1 + Processor Tree: + TableScan + alias: decimal_udf + Select Operator + expressions: + expr: (key + value) + type: decimal(65,30) + outputColumnNames: _col0 + ListSink PREHOOK: query: SELECT key + value FROM DECIMAL_UDF @@ -193,32 +171,21 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (+ (TOK_TABLE_OR_COL key) (/ (TOK_TABLE_OR_COL value) 2)))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - decimal_udf - TableScan - alias: decimal_udf - Select Operator - expressions: - expr: (key + (value / 2)) - type: decimal(65,30) - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: -1 + Processor Tree: + TableScan + alias: decimal_udf + Select Operator + expressions: + expr: (key + (value / 2)) + type: decimal(65,30) + 
outputColumnNames: _col0 + ListSink PREHOOK: query: SELECT key + (value/2) FROM DECIMAL_UDF @@ -275,32 +242,21 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (+ (TOK_TABLE_OR_COL key) '1.0'))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - decimal_udf - TableScan - alias: decimal_udf - Select Operator - expressions: - expr: (key + '1.0') - type: decimal(65,30) - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: -1 + Processor Tree: + TableScan + alias: decimal_udf + Select Operator + expressions: + expr: (key + '1.0') + type: decimal(65,30) + outputColumnNames: _col0 + ListSink PREHOOK: query: SELECT key + '1.0' FROM DECIMAL_UDF @@ -359,32 +315,21 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (- (TOK_TABLE_OR_COL key) (TOK_TABLE_OR_COL key)))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - decimal_udf - TableScan - alias: decimal_udf - Select Operator - expressions: - expr: (key - key) - type: decimal(65,30) - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: -1 + Processor Tree: + TableScan + alias: decimal_udf + Select Operator + expressions: + expr: (key - key) + type: decimal(65,30) + outputColumnNames: _col0 + ListSink PREHOOK: query: SELECT key - key FROM DECIMAL_UDF @@ -441,32 +386,21 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (- (TOK_TABLE_OR_COL key) (TOK_TABLE_OR_COL value)))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - decimal_udf - TableScan - alias: decimal_udf - Select Operator - expressions: - expr: (key - value) - type: decimal(65,30) - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: -1 + Processor Tree: + TableScan + alias: decimal_udf + Select Operator + expressions: + expr: (key - value) + type: decimal(65,30) + outputColumnNames: _col0 + ListSink PREHOOK: query: SELECT key - value FROM DECIMAL_UDF @@ -523,32 +457,21 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (- (TOK_TABLE_OR_COL key) (/ (TOK_TABLE_OR_COL value) 2)))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root 
stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - decimal_udf - TableScan - alias: decimal_udf - Select Operator - expressions: - expr: (key - (value / 2)) - type: decimal(65,30) - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: -1 + Processor Tree: + TableScan + alias: decimal_udf + Select Operator + expressions: + expr: (key - (value / 2)) + type: decimal(65,30) + outputColumnNames: _col0 + ListSink PREHOOK: query: SELECT key - (value/2) FROM DECIMAL_UDF @@ -605,32 +528,21 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (- (TOK_TABLE_OR_COL key) '1.0'))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - decimal_udf - TableScan - alias: decimal_udf - Select Operator - expressions: - expr: (key - '1.0') - type: decimal(65,30) - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: -1 + Processor Tree: + TableScan + alias: decimal_udf + Select Operator + expressions: + expr: (key - '1.0') + type: decimal(65,30) + outputColumnNames: _col0 + ListSink PREHOOK: query: SELECT key - '1.0' FROM DECIMAL_UDF @@ -689,32 +601,21 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (* (TOK_TABLE_OR_COL key) (TOK_TABLE_OR_COL key)))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - decimal_udf - TableScan - alias: decimal_udf - Select Operator - expressions: - expr: (key * key) - type: decimal(65,30) - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: -1 + Processor Tree: + TableScan + alias: decimal_udf + Select Operator + expressions: + expr: (key * key) + type: decimal(65,30) + outputColumnNames: _col0 + ListSink PREHOOK: query: SELECT key * key FROM DECIMAL_UDF @@ -771,32 +672,21 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (* (TOK_TABLE_OR_COL key) (TOK_TABLE_OR_COL value)))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - decimal_udf - TableScan - alias: decimal_udf - Select Operator - expressions: - expr: (key * value) - type: decimal(65,30) - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: -1 + Processor Tree: + TableScan + alias: decimal_udf + Select Operator + expressions: + expr: (key * value) + type: decimal(65,30) + outputColumnNames: _col0 + ListSink PREHOOK: query: SELECT key * value FROM DECIMAL_UDF @@ -853,32 +743,21 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (* (TOK_TABLE_OR_COL key) (/ (TOK_TABLE_OR_COL value) 2)))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - decimal_udf - TableScan - alias: decimal_udf - Select Operator - expressions: - expr: (key * (value / 2)) - type: decimal(65,30) - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: -1 + Processor Tree: + TableScan + alias: decimal_udf + Select Operator + expressions: + expr: (key * (value / 2)) + type: decimal(65,30) + outputColumnNames: _col0 + ListSink PREHOOK: query: SELECT key * (value/2) FROM DECIMAL_UDF @@ -935,32 +814,21 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (* (TOK_TABLE_OR_COL key) '2.0'))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - decimal_udf - TableScan - alias: decimal_udf - Select Operator - expressions: - expr: (key * '2.0') - type: decimal(65,30) - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: -1 + Processor Tree: + TableScan + alias: decimal_udf + Select Operator + expressions: + expr: (key * '2.0') + type: decimal(65,30) + outputColumnNames: _col0 + ListSink PREHOOK: query: SELECT key * '2.0' FROM DECIMAL_UDF @@ -1019,33 +887,22 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (/ (TOK_TABLE_OR_COL key) 0))) (TOK_LIMIT 1))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - decimal_udf - TableScan - alias: decimal_udf - Select Operator - expressions: - expr: (key / 0) - type: decimal(65,30) - outputColumnNames: _col0 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: 1 + Processor Tree: + TableScan + alias: decimal_udf + Select Operator + expressions: + expr: (key / 0) + type: decimal(65,30) + outputColumnNames: _col0 + Limit + ListSink PREHOOK: query: SELECT key / 0 FROM DECIMAL_UDF 
limit 1 @@ -1065,33 +922,22 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (/ (TOK_TABLE_OR_COL key) TOK_NULL))) (TOK_LIMIT 1))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - decimal_udf - TableScan - alias: decimal_udf - Select Operator - expressions: - expr: (key / null) - type: decimal(65,30) - outputColumnNames: _col0 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: 1 + Processor Tree: + TableScan + alias: decimal_udf + Select Operator + expressions: + expr: (key / null) + type: decimal(65,30) + outputColumnNames: _col0 + Limit + ListSink PREHOOK: query: SELECT key / NULL FROM DECIMAL_UDF limit 1 @@ -1111,36 +957,25 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (/ (TOK_TABLE_OR_COL key) (TOK_TABLE_OR_COL key)))) (TOK_WHERE (and (TOK_FUNCTION TOK_ISNOTNULL (TOK_TABLE_OR_COL key)) (<> (TOK_TABLE_OR_COL key) 0))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - decimal_udf - TableScan - alias: decimal_udf - Filter Operator - predicate: - expr: (key is not null and (key <> 0)) - type: boolean - Select Operator - expressions: - expr: (key / key) - type: decimal(65,30) - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: -1 + Processor Tree: + TableScan + alias: decimal_udf + Filter Operator + predicate: + expr: (key is not null and (key <> 0)) + type: boolean + Select Operator + expressions: + expr: (key / key) + type: decimal(65,30) + outputColumnNames: _col0 + ListSink PREHOOK: query: SELECT key / key FROM DECIMAL_UDF WHERE key is not null and key <> 0 @@ -1193,36 +1028,25 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (/ (TOK_TABLE_OR_COL key) (TOK_TABLE_OR_COL value)))) (TOK_WHERE (and (TOK_FUNCTION TOK_ISNOTNULL (TOK_TABLE_OR_COL value)) (<> (TOK_TABLE_OR_COL value) 0))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - decimal_udf - TableScan - alias: decimal_udf - Filter Operator - predicate: - expr: (value is not null and (value <> 0)) - type: boolean - Select Operator - expressions: - expr: (key / value) - type: decimal(65,30) - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: -1 + Processor Tree: + TableScan + alias: decimal_udf + Filter 
Operator + predicate: + expr: (value is not null and (value <> 0)) + type: boolean + Select Operator + expressions: + expr: (key / value) + type: decimal(65,30) + outputColumnNames: _col0 + ListSink PREHOOK: query: SELECT key / value FROM DECIMAL_UDF WHERE value is not null and value <> 0 @@ -1265,36 +1089,25 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (/ (TOK_TABLE_OR_COL key) (/ (TOK_TABLE_OR_COL value) 2)))) (TOK_WHERE (and (TOK_FUNCTION TOK_ISNOTNULL (TOK_TABLE_OR_COL value)) (<> (TOK_TABLE_OR_COL value) 0))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - decimal_udf - TableScan - alias: decimal_udf - Filter Operator - predicate: - expr: (value is not null and (value <> 0)) - type: boolean - Select Operator - expressions: - expr: (key / (value / 2)) - type: decimal(65,30) - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: -1 + Processor Tree: + TableScan + alias: decimal_udf + Filter Operator + predicate: + expr: (value is not null and (value <> 0)) + type: boolean + Select Operator + expressions: + expr: (key / (value / 2)) + type: decimal(65,30) + outputColumnNames: _col0 + ListSink PREHOOK: query: SELECT key / (value/2) FROM DECIMAL_UDF WHERE value is not null and value <> 0 @@ -1337,32 +1150,21 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (/ (TOK_TABLE_OR_COL key) '2.0'))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - decimal_udf - TableScan - alias: decimal_udf - Select Operator - expressions: - expr: (key / '2.0') - type: decimal(65,30) - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: -1 + Processor Tree: + TableScan + alias: decimal_udf + Select Operator + expressions: + expr: (key / '2.0') + type: decimal(65,30) + outputColumnNames: _col0 + ListSink PREHOOK: query: SELECT key / '2.0' FROM DECIMAL_UDF @@ -1421,32 +1223,21 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION abs (TOK_TABLE_OR_COL key)))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - decimal_udf - TableScan - alias: decimal_udf - Select Operator - expressions: - expr: abs(key) - type: decimal(65,30) - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch 
Operator limit: -1 + Processor Tree: + TableScan + alias: decimal_udf + Select Operator + expressions: + expr: abs(key) + type: decimal(65,30) + outputColumnNames: _col0 + ListSink PREHOOK: query: SELECT abs(key) FROM DECIMAL_UDF @@ -1647,32 +1438,21 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (- (TOK_TABLE_OR_COL key)))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - decimal_udf - TableScan - alias: decimal_udf - Select Operator - expressions: - expr: (- key) - type: decimal(65,30) - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: -1 + Processor Tree: + TableScan + alias: decimal_udf + Select Operator + expressions: + expr: (- key) + type: decimal(65,30) + outputColumnNames: _col0 + ListSink PREHOOK: query: SELECT -key FROM DECIMAL_UDF @@ -1731,32 +1511,21 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (+ (TOK_TABLE_OR_COL key)))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - decimal_udf - TableScan - alias: decimal_udf - Select Operator - expressions: - expr: key - type: decimal(65,30) - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: -1 + Processor Tree: + TableScan + alias: decimal_udf + Select Operator + expressions: + expr: key + type: decimal(65,30) + outputColumnNames: _col0 + ListSink PREHOOK: query: SELECT +key FROM DECIMAL_UDF @@ -1815,32 +1584,21 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION CEIL (TOK_TABLE_OR_COL key)))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - decimal_udf - TableScan - alias: decimal_udf - Select Operator - expressions: - expr: ceil(key) - type: decimal(65,30) - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: -1 + Processor Tree: + TableScan + alias: decimal_udf + Select Operator + expressions: + expr: ceil(key) + type: decimal(65,30) + outputColumnNames: _col0 + ListSink PREHOOK: query: SELECT CEIL(key) FROM DECIMAL_UDF @@ -1899,32 +1657,21 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION FLOOR (TOK_TABLE_OR_COL key)))))) STAGE DEPENDENCIES: - 
Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - decimal_udf - TableScan - alias: decimal_udf - Select Operator - expressions: - expr: floor(key) - type: decimal(65,30) - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: -1 + Processor Tree: + TableScan + alias: decimal_udf + Select Operator + expressions: + expr: floor(key) + type: decimal(65,30) + outputColumnNames: _col0 + ListSink PREHOOK: query: SELECT FLOOR(key) FROM DECIMAL_UDF @@ -1983,32 +1730,21 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION ROUND (TOK_TABLE_OR_COL key) 2))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - decimal_udf - TableScan - alias: decimal_udf - Select Operator - expressions: - expr: round(key, 2) - type: decimal(65,30) - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: -1 + Processor Tree: + TableScan + alias: decimal_udf + Select Operator + expressions: + expr: round(key, 2) + type: decimal(65,30) + outputColumnNames: _col0 + ListSink PREHOOK: query: SELECT ROUND(key, 2) FROM DECIMAL_UDF @@ -2067,32 +1803,21 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION POWER (TOK_TABLE_OR_COL key) 2))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - decimal_udf - TableScan - alias: decimal_udf - Select Operator - expressions: - expr: power(key, 2) - type: decimal(65,30) - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: -1 + Processor Tree: + TableScan + alias: decimal_udf + Select Operator + expressions: + expr: power(key, 2) + type: decimal(65,30) + outputColumnNames: _col0 + ListSink PREHOOK: query: SELECT POWER(key, 2) FROM DECIMAL_UDF @@ -2151,32 +1876,21 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (% (+ (TOK_TABLE_OR_COL key) 1) (/ (TOK_TABLE_OR_COL key) 2)))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - decimal_udf - TableScan - alias: decimal_udf - Select Operator - expressions: - expr: ((key + 1) % (key / 2)) - type: decimal(65,30) - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: 
org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: -1 + Processor Tree: + TableScan + alias: decimal_udf + Select Operator + expressions: + expr: ((key + 1) % (key / 2)) + type: decimal(65,30) + outputColumnNames: _col0 + ListSink PREHOOK: query: SELECT (key + 1) % (key / 2) FROM DECIMAL_UDF diff --git a/ql/src/test/results/clientpositive/disallow_incompatible_type_change_off.q.out b/ql/src/test/results/clientpositive/disallow_incompatible_type_change_off.q.out index 4cddfdc..9b4f307 100644 --- a/ql/src/test/results/clientpositive/disallow_incompatible_type_change_off.q.out +++ b/ql/src/test/results/clientpositive/disallow_incompatible_type_change_off.q.out @@ -12,11 +12,11 @@ PREHOOK: type: CREATETABLE POSTHOOK: query: CREATE TABLE test_table123 (a INT, b MAP) PARTITIONED BY (ds STRING) STORED AS SEQUENCEFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: default@test_table123 -PREHOOK: query: INSERT OVERWRITE TABLE test_table123 PARTITION(ds="foo1") SELECT 1, MAP("a1", "b1") FROM src LIMIT 1 +PREHOOK: query: INSERT OVERWRITE TABLE test_table123 PARTITION(ds="foo1") SELECT 1, MAP("a1", "b1") FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@test_table123@ds=foo1 -POSTHOOK: query: INSERT OVERWRITE TABLE test_table123 PARTITION(ds="foo1") SELECT 1, MAP("a1", "b1") FROM src LIMIT 1 +POSTHOOK: query: INSERT OVERWRITE TABLE test_table123 PARTITION(ds="foo1") SELECT 1, MAP("a1", "b1") FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@test_table123@ds=foo1 diff --git a/ql/src/test/results/clientpositive/filter_join_breaktask2.q.out b/ql/src/test/results/clientpositive/filter_join_breaktask2.q.out index d8bff1d..1583207 100644 --- a/ql/src/test/results/clientpositive/filter_join_breaktask2.q.out +++ b/ql/src/test/results/clientpositive/filter_join_breaktask2.q.out @@ -20,11 +20,11 @@ PREHOOK: type: CREATETABLE POSTHOOK: query: create table T4 (c0 bigint, c1 string, c2 string, c3 string, c4 string, c5 string, c6 string, c7 string, c8 string, c9 string, c10 string, c11 string, c12 string, c13 string, c14 string, c15 string, c16 string, c17 string, c18 string, c19 string, c20 string, c21 string, c22 string, c23 string, c24 string, c25 string, c26 string, c27 string, c28 string, c29 string, c30 string, c31 string, c32 string, c33 string, c34 string, c35 string, c36 string, c37 string, c38 string, c39 string, c40 string, c41 string, c42 string, c43 string, c44 string, c45 string, c46 string, c47 string, c48 string, c49 string, c50 string, c51 string, c52 string, c53 string, c54 string, c55 string, c56 string, c57 string, c58 string, c59 string, c60 string, c61 string, c62 string, c63 string, c64 string, c65 string, c66 string, c67 bigint, c68 string, c69 string, c70 bigint, c71 bigint, c72 bigint, c73 string, c74 string, c75 string, c76 string, c77 string, c78 string, c79 string, c80 string, c81 bigint, c82 bigint, c83 bigint) partitioned by (ds string) POSTHOOK: type: CREATETABLE POSTHOOK: Output: default@T4 -PREHOOK: query: insert overwrite table T1 partition (ds='2010-04-17') select '5', '1', '1', '1', 0, 0,4 from src limit 1 +PREHOOK: query: insert overwrite table T1 partition (ds='2010-04-17') select '5', '1', '1', '1', 0, 0,4 from src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: 
default@t1@ds=2010-04-17 -POSTHOOK: query: insert overwrite table T1 partition (ds='2010-04-17') select '5', '1', '1', '1', 0, 0,4 from src limit 1 +POSTHOOK: query: insert overwrite table T1 partition (ds='2010-04-17') select '5', '1', '1', '1', 0, 0,4 from src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@t1@ds=2010-04-17 @@ -35,11 +35,11 @@ POSTHOOK: Lineage: t1 PARTITION(ds=2010-04-17).c4 SIMPLE [] POSTHOOK: Lineage: t1 PARTITION(ds=2010-04-17).c5 SIMPLE [] POSTHOOK: Lineage: t1 PARTITION(ds=2010-04-17).c6 SIMPLE [] POSTHOOK: Lineage: t1 PARTITION(ds=2010-04-17).c7 SIMPLE [] -PREHOOK: query: insert overwrite table T2 partition(ds='2010-04-17') select '5','name', NULL, '2', 'kavin',NULL, '9', 'c', '8', '0', '0', '7', '1','2', '0', '3','2', NULL, '1', NULL, '3','2','0','0','5','10' from src limit 1 +PREHOOK: query: insert overwrite table T2 partition(ds='2010-04-17') select '5','name', NULL, '2', 'kavin',NULL, '9', 'c', '8', '0', '0', '7', '1','2', '0', '3','2', NULL, '1', NULL, '3','2','0','0','5','10' from src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@t2@ds=2010-04-17 -POSTHOOK: query: insert overwrite table T2 partition(ds='2010-04-17') select '5','name', NULL, '2', 'kavin',NULL, '9', 'c', '8', '0', '0', '7', '1','2', '0', '3','2', NULL, '1', NULL, '3','2','0','0','5','10' from src limit 1 +POSTHOOK: query: insert overwrite table T2 partition(ds='2010-04-17') select '5','name', NULL, '2', 'kavin',NULL, '9', 'c', '8', '0', '0', '7', '1','2', '0', '3','2', NULL, '1', NULL, '3','2','0','0','5','10' from src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@t2@ds=2010-04-17 @@ -76,11 +76,11 @@ POSTHOOK: Lineage: t2 PARTITION(ds=2010-04-17).c6 SIMPLE [] POSTHOOK: Lineage: t2 PARTITION(ds=2010-04-17).c7 SIMPLE [] POSTHOOK: Lineage: t2 PARTITION(ds=2010-04-17).c8 SIMPLE [] POSTHOOK: Lineage: t2 PARTITION(ds=2010-04-17).c9 SIMPLE [] -PREHOOK: query: insert overwrite table T3 partition (ds='2010-04-17') select 4,5,0 from src limit 1 +PREHOOK: query: insert overwrite table T3 partition (ds='2010-04-17') select 4,5,0 from src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@t3@ds=2010-04-17 -POSTHOOK: query: insert overwrite table T3 partition (ds='2010-04-17') select 4,5,0 from src limit 1 +POSTHOOK: query: insert overwrite table T3 partition (ds='2010-04-17') select 4,5,0 from src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@t3@ds=2010-04-17 diff --git a/ql/src/test/results/clientpositive/literal_decimal.q.out b/ql/src/test/results/clientpositive/literal_decimal.q.out index 0ba5043..2d1ae55 100644 --- a/ql/src/test/results/clientpositive/literal_decimal.q.out +++ b/ql/src/test/results/clientpositive/literal_decimal.q.out @@ -6,49 +6,38 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (- 1BD)) (TOK_SELEXPR 0BD) (TOK_SELEXPR 1BD) (TOK_SELEXPR 3.14BD) (TOK_SELEXPR (- 3.14BD)) (TOK_SELEXPR 99999999999999999BD) (TOK_SELEXPR 99999999999999999.9999999999999BD) (TOK_SELEXPR 1E-99BD) (TOK_SELEXPR 1E99BD)) (TOK_LIMIT 1))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - src - TableScan - alias: src - Select Operator - expressions: - expr: (- 1) - type: 
decimal(65,30) - expr: 0 - type: decimal(65,30) - expr: 1 - type: decimal(65,30) - expr: 3.14 - type: decimal(65,30) - expr: (- 3.14) - type: decimal(65,30) - expr: 99999999999999999 - type: decimal(65,30) - expr: 99999999999999999.9999999999999 - type: decimal(65,30) - expr: 1E-99 - type: decimal(65,30) - expr: 1E99 - type: decimal(65,30) - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: 1 + Processor Tree: + TableScan + alias: src + Select Operator + expressions: + expr: (- 1) + type: decimal(65,30) + expr: 0 + type: decimal(65,30) + expr: 1 + type: decimal(65,30) + expr: 3.14 + type: decimal(65,30) + expr: (- 3.14) + type: decimal(65,30) + expr: 99999999999999999 + type: decimal(65,30) + expr: 99999999999999999.9999999999999 + type: decimal(65,30) + expr: 1E-99 + type: decimal(65,30) + expr: 1E99 + type: decimal(65,30) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8 + Limit + ListSink PREHOOK: query: SELECT -1BD, 0BD, 1BD, 3.14BD, -3.14BD, 99999999999999999BD, 99999999999999999.9999999999999BD, 1E-99BD, 1E99BD FROM src LIMIT 1 diff --git a/ql/src/test/results/clientpositive/literal_double.q.out b/ql/src/test/results/clientpositive/literal_double.q.out index d60a763..a791a3c 100644 --- a/ql/src/test/results/clientpositive/literal_double.q.out +++ b/ql/src/test/results/clientpositive/literal_double.q.out @@ -6,49 +6,38 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR 3.14) (TOK_SELEXPR (- 3.14)) (TOK_SELEXPR 3.14e8) (TOK_SELEXPR 3.14e-8) (TOK_SELEXPR (- 3.14e8)) (TOK_SELEXPR (- 3.14e-8)) (TOK_SELEXPR 3.14e+8) (TOK_SELEXPR 3.14E8) (TOK_SELEXPR 3.14E-8)) (TOK_LIMIT 1))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - src - TableScan - alias: src - Select Operator - expressions: - expr: 3.14 - type: double - expr: (- 3.14) - type: double - expr: 3.14E8 - type: double - expr: 3.14E-8 - type: double - expr: (- 3.14E8) - type: double - expr: (- 3.14E-8) - type: double - expr: 3.14E8 - type: double - expr: 3.14E8 - type: double - expr: 3.14E-8 - type: double - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: 1 + Processor Tree: + TableScan + alias: src + Select Operator + expressions: + expr: 3.14 + type: double + expr: (- 3.14) + type: double + expr: 3.14E8 + type: double + expr: 3.14E-8 + type: double + expr: (- 3.14E8) + type: double + expr: (- 3.14E-8) + type: double + expr: 3.14E8 + type: double + expr: 3.14E8 + type: double + expr: 3.14E-8 + type: double + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8 + Limit + ListSink PREHOOK: query: SELECT 3.14, -3.14, 3.14e8, 3.14e-8, -3.14e8, -3.14e-8, 3.14e+8, 3.14E8, 3.14E-8 FROM src LIMIT 1 diff --git 
a/ql/src/test/results/clientpositive/literal_ints.q.out b/ql/src/test/results/clientpositive/literal_ints.q.out index c86c271..40fde27 100644 --- a/ql/src/test/results/clientpositive/literal_ints.q.out +++ b/ql/src/test/results/clientpositive/literal_ints.q.out @@ -6,39 +6,28 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR 100) (TOK_SELEXPR 100Y) (TOK_SELEXPR 100S) (TOK_SELEXPR 100L)) (TOK_LIMIT 1))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - src - TableScan - alias: src - Select Operator - expressions: - expr: 100 - type: int - expr: 100 - type: tinyint - expr: 100 - type: smallint - expr: 100 - type: bigint - outputColumnNames: _col0, _col1, _col2, _col3 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: 1 + Processor Tree: + TableScan + alias: src + Select Operator + expressions: + expr: 100 + type: int + expr: 100 + type: tinyint + expr: 100 + type: smallint + expr: 100 + type: bigint + outputColumnNames: _col0, _col1, _col2, _col3 + Limit + ListSink PREHOOK: query: SELECT 100, 100Y, 100S, 100L FROM src LIMIT 1 diff --git a/ql/src/test/results/clientpositive/literal_string.q.out b/ql/src/test/results/clientpositive/literal_string.q.out index 0c6372a..a10a423 100644 --- a/ql/src/test/results/clientpositive/literal_string.q.out +++ b/ql/src/test/results/clientpositive/literal_string.q.out @@ -16,51 +16,40 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR 'face''book') (TOK_SELEXPR (TOK_STRINGLITERALSEQUENCE 'face' 'book')) (TOK_SELEXPR (TOK_STRINGLITERALSEQUENCE 'face' 'book')) (TOK_SELEXPR "face""book") (TOK_SELEXPR (TOK_STRINGLITERALSEQUENCE "face" "book")) (TOK_SELEXPR (TOK_STRINGLITERALSEQUENCE "face" "book")) (TOK_SELEXPR (TOK_STRINGLITERALSEQUENCE 'face' 'bo' 'ok')) (TOK_SELEXPR 'face'"book") (TOK_SELEXPR "face"'book') (TOK_SELEXPR 'facebook')) (TOK_LIMIT 1))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - src - TableScan - alias: src - Select Operator - expressions: - expr: 'facebook' - type: string - expr: 'facebook' - type: string - expr: 'facebook' - type: string - expr: 'facebook' - type: string - expr: 'facebook' - type: string - expr: 'facebook' - type: string - expr: 'facebook' - type: string - expr: 'facebook' - type: string - expr: 'facebook' - type: string - expr: 'facebook' - type: string - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: 1 + Processor Tree: + TableScan + alias: src + Select Operator + expressions: + expr: 'facebook' + type: string + expr: 'facebook' + type: string + expr: 'facebook' + type: string + expr: 'facebook' + type: string + expr: 
'facebook' + type: string + expr: 'facebook' + type: string + expr: 'facebook' + type: string + expr: 'facebook' + type: string + expr: 'facebook' + type: string + expr: 'facebook' + type: string + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9 + Limit + ListSink PREHOOK: query: SELECT 'face''book', 'face' 'book', 'face' diff --git a/ql/src/test/results/clientpositive/macro.q.out b/ql/src/test/results/clientpositive/macro.q.out index ed26237..46cd501 100644 --- a/ql/src/test/results/clientpositive/macro.q.out +++ b/ql/src/test/results/clientpositive/macro.q.out @@ -19,33 +19,22 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION SIGMOID 2))) (TOK_LIMIT 1))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - src - TableScan - alias: src - Select Operator - expressions: - expr: SIGMOID(2) - type: double - outputColumnNames: _col0 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: 1 + Processor Tree: + TableScan + alias: src + Select Operator + expressions: + expr: SIGMOID(2) + type: double + outputColumnNames: _col0 + Limit + ListSink PREHOOK: query: EXPLAIN EXTENDED SELECT SIGMOID(2) FROM src LIMIT 1 @@ -56,94 +45,23 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION SIGMOID 2))) (TOK_LIMIT 1))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - src - TableScan - alias: src - GatherStats: false - Select Operator - expressions: - expr: SIGMOID(2) - type: double - outputColumnNames: _col0 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - columns _col0 - columns.types double - escape.delim \ - hive.serialization.extend.nesting.levels true - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - TotalFiles: 1 - GatherStats: false - MultiFileSpray: false - Path -> Alias: -#### A masked pattern was here #### - Path -> Partition: -#### A masked pattern was here #### - Partition - base file name: src - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value - columns.types string:string -#### A masked pattern was here #### - name default.src - numFiles 1 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct src { string key, string value} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - - input format: 
org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value - columns.types string:string -#### A masked pattern was here #### - name default.src - numFiles 1 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct src { string key, string value} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.src - name: default.src - Truncated Path -> Alias: - /src [src] - Stage: Stage-0 Fetch Operator limit: 1 + Processor Tree: + TableScan + alias: src + GatherStats: false + Select Operator + expressions: + expr: SIGMOID(2) + type: double + outputColumnNames: _col0 + Limit + ListSink PREHOOK: query: DROP TEMPORARY MACRO SIGMOID @@ -171,33 +89,22 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (+ (TOK_FUNCTION FIXED_NUMBER) 1))) (TOK_LIMIT 1))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - src - TableScan - alias: src - Select Operator - expressions: - expr: (FIXED_NUMBER() + 1) - type: int - outputColumnNames: _col0 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: 1 + Processor Tree: + TableScan + alias: src + Select Operator + expressions: + expr: (FIXED_NUMBER() + 1) + type: int + outputColumnNames: _col0 + Limit + ListSink PREHOOK: query: EXPLAIN EXTENDED SELECT FIXED_NUMBER() + 1 FROM src LIMIT 1 @@ -208,94 +115,23 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (+ (TOK_FUNCTION FIXED_NUMBER) 1))) (TOK_LIMIT 1))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - src - TableScan - alias: src - GatherStats: false - Select Operator - expressions: - expr: (FIXED_NUMBER() + 1) - type: int - outputColumnNames: _col0 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - columns _col0 - columns.types int - escape.delim \ - hive.serialization.extend.nesting.levels true - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - TotalFiles: 1 - GatherStats: false - MultiFileSpray: false - Path -> Alias: -#### A masked pattern was here #### - Path -> Partition: -#### A masked pattern was here #### - Partition - base file name: src - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value - columns.types string:string -#### A masked pattern was here #### - name 
default.src - numFiles 1 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct src { string key, string value} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value - columns.types string:string -#### A masked pattern was here #### - name default.src - numFiles 1 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct src { string key, string value} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.src - name: default.src - Truncated Path -> Alias: - /src [src] - Stage: Stage-0 Fetch Operator limit: 1 + Processor Tree: + TableScan + alias: src + GatherStats: false + Select Operator + expressions: + expr: (FIXED_NUMBER() + 1) + type: int + outputColumnNames: _col0 + Limit + ListSink PREHOOK: query: DROP TEMPORARY MACRO FIXED_NUMBER @@ -344,33 +180,22 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION SIMPLE_ADD 1 9))) (TOK_LIMIT 1))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - src - TableScan - alias: src - Select Operator - expressions: - expr: SIMPLE_ADD(1, 9) - type: int - outputColumnNames: _col0 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: 1 + Processor Tree: + TableScan + alias: src + Select Operator + expressions: + expr: SIMPLE_ADD(1, 9) + type: int + outputColumnNames: _col0 + Limit + ListSink PREHOOK: query: EXPLAIN EXTENDED SELECT SIMPLE_ADD(1, 9) FROM src LIMIT 1 @@ -381,94 +206,23 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION SIMPLE_ADD 1 9))) (TOK_LIMIT 1))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - src - TableScan - alias: src - GatherStats: false - Select Operator - expressions: - expr: SIMPLE_ADD(1, 9) - type: int - outputColumnNames: _col0 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - columns _col0 - columns.types int - escape.delim \ - hive.serialization.extend.nesting.levels true - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - TotalFiles: 1 - GatherStats: false - MultiFileSpray: false - Path -> Alias: -#### A masked pattern was here #### - Path 
-> Partition: -#### A masked pattern was here #### - Partition - base file name: src - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value - columns.types string:string -#### A masked pattern was here #### - name default.src - numFiles 1 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct src { string key, string value} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value - columns.types string:string -#### A masked pattern was here #### - name default.src - numFiles 1 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct src { string key, string value} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.src - name: default.src - Truncated Path -> Alias: - /src [src] - Stage: Stage-0 Fetch Operator limit: 1 + Processor Tree: + TableScan + alias: src + GatherStats: false + Select Operator + expressions: + expr: SIMPLE_ADD(1, 9) + type: int + outputColumnNames: _col0 + Limit + ListSink PREHOOK: query: DROP TEMPORARY MACRO SIMPLE_ADD diff --git a/ql/src/test/results/clientpositive/null_cast.q.out b/ql/src/test/results/clientpositive/null_cast.q.out index 20e08cb..16ed38a 100644 --- a/ql/src/test/results/clientpositive/null_cast.q.out +++ b/ql/src/test/results/clientpositive/null_cast.q.out @@ -2,16 +2,16 @@ PREHOOK: query: EXPLAIN SELECT ARRAY(NULL, 0), ARRAY(NULL, ARRAY()), ARRAY(NULL, MAP()), ARRAY(NULL, STRUCT(0)) - FROM src LIMIT 1 + FROM src tablesample (1 rows) PREHOOK: type: QUERY POSTHOOK: query: EXPLAIN SELECT ARRAY(NULL, 0), ARRAY(NULL, ARRAY()), ARRAY(NULL, MAP()), ARRAY(NULL, STRUCT(0)) - FROM src LIMIT 1 + FROM src tablesample (1 rows) POSTHOOK: type: QUERY ABSTRACT SYNTAX TREE: - (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION ARRAY TOK_NULL 0)) (TOK_SELEXPR (TOK_FUNCTION ARRAY TOK_NULL (TOK_FUNCTION ARRAY))) (TOK_SELEXPR (TOK_FUNCTION ARRAY TOK_NULL (TOK_FUNCTION MAP))) (TOK_SELEXPR (TOK_FUNCTION ARRAY TOK_NULL (TOK_FUNCTION STRUCT 0)))) (TOK_LIMIT 1))) + (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src) (TOK_TABLESPLITSAMPLE TOK_ROWCOUNT 1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION ARRAY TOK_NULL 0)) (TOK_SELEXPR (TOK_FUNCTION ARRAY TOK_NULL (TOK_FUNCTION ARRAY))) (TOK_SELEXPR (TOK_FUNCTION ARRAY TOK_NULL (TOK_FUNCTION MAP))) (TOK_SELEXPR (TOK_FUNCTION ARRAY TOK_NULL (TOK_FUNCTION STRUCT 0)))))) STAGE DEPENDENCIES: Stage-1 is a root stage @@ -24,6 +24,7 @@ STAGE PLANS: src TableScan alias: src + Row Limit Per Split: 1 Select Operator expressions: expr: array(null,0) @@ -35,25 +36,24 @@ STAGE PLANS: expr: array(null,struct(0)) type: array<struct<col1:int>> outputColumnNames: _col0, _col1, _col2, _col3 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Stage: Stage-0 Fetch Operator - limit: 1 + limit: -1 PREHOOK: query: SELECT ARRAY(NULL, 0), ARRAY(NULL, ARRAY()), ARRAY(NULL, MAP()), ARRAY(NULL, STRUCT(0)) -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### @@ -61,7 +61,7 @@ POSTHOOK: query: SELECT ARRAY(NULL, 0), ARRAY(NULL, ARRAY()), ARRAY(NULL, MAP()), ARRAY(NULL, STRUCT(0)) -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/num_op_type_conv.q.out b/ql/src/test/results/clientpositive/num_op_type_conv.q.out index e2de911..87feb59 100644 --- a/ql/src/test/results/clientpositive/num_op_type_conv.q.out +++ b/ql/src/test/results/clientpositive/num_op_type_conv.q.out @@ -12,43 +12,32 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (+ TOK_NULL 7)) (TOK_SELEXPR (- 1.0 TOK_NULL)) (TOK_SELEXPR (+ TOK_NULL TOK_NULL)) (TOK_SELEXPR (% (TOK_FUNCTION TOK_BIGINT 21) (TOK_FUNCTION TOK_TINYINT 5))) (TOK_SELEXPR (% (TOK_FUNCTION TOK_BIGINT 21) (TOK_FUNCTION TOK_BIGINT 21))) (TOK_SELEXPR (% 9 "3"))) (TOK_LIMIT 1))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - src - TableScan - alias: src - Select Operator - expressions: - expr: (null + 7) - type: int - expr: (1.0 - null) - type: double - expr: (null + null) - type: tinyint - expr: (UDFToLong(21) % UDFToByte(5)) - type: bigint - expr: (UDFToLong(21) % UDFToLong(21)) - type: bigint - expr: (9 % '3') - type: double - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: 1 + Processor Tree: + TableScan + alias: src + Select Operator + expressions: + expr: (null + 7) + type: int + expr: (1.0 - null) + type: double + expr: (null + null) + type: tinyint + expr: (UDFToLong(21) % UDFToByte(5)) + type: bigint + expr: (UDFToLong(21) % UDFToLong(21)) + type: bigint + expr: (9 % '3') + type: double + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + Limit + ListSink PREHOOK: query: SELECT null + 7, 1.0 - null, null + null, diff --git a/ql/src/test/results/clientpositive/orc_diff_part_cols.q.out b/ql/src/test/results/clientpositive/orc_diff_part_cols.q.out index 93bc891..9893a14 100644 --- a/ql/src/test/results/clientpositive/orc_diff_part_cols.q.out +++ b/ql/src/test/results/clientpositive/orc_diff_part_cols.q.out @@ -15,7 +15,7 @@ PREHOOK: query: -- Create a table with one column write to a partition, then add -- to another partition -- This can produce unexpected results with CombineHiveInputFormat -INSERT OVERWRITE TABLE test_orc PARTITION (part = '1') SELECT key FROM src LIMIT 5 +INSERT OVERWRITE 
TABLE test_orc PARTITION (part = '1') SELECT key FROM src tablesample (5 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@test_orc@part=1 @@ -23,7 +23,7 @@ POSTHOOK: query: -- Create a table with one column write to a partition, then ad -- to another partition -- This can produce unexpected results with CombineHiveInputFormat -INSERT OVERWRITE TABLE test_orc PARTITION (part = '1') SELECT key FROM src LIMIT 5 +INSERT OVERWRITE TABLE test_orc PARTITION (part = '1') SELECT key FROM src tablesample (5 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@test_orc@part=1 diff --git a/ql/src/test/results/clientpositive/orc_empty_strings.q.out b/ql/src/test/results/clientpositive/orc_empty_strings.q.out index f5a768d..7975ca9 100644 --- a/ql/src/test/results/clientpositive/orc_empty_strings.q.out +++ b/ql/src/test/results/clientpositive/orc_empty_strings.q.out @@ -9,11 +9,11 @@ STORED AS INPUTFORMAT 'org.apache.hadoop.hive.ql.io.orc.OrcInputFormat' OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat' POSTHOOK: type: CREATETABLE POSTHOOK: Output: default@test_orc -PREHOOK: query: INSERT OVERWRITE TABLE test_orc SELECT '' FROM src limit 10 +PREHOOK: query: INSERT OVERWRITE TABLE test_orc SELECT '' FROM src tablesample (10 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@test_orc -POSTHOOK: query: INSERT OVERWRITE TABLE test_orc SELECT '' FROM src limit 10 +POSTHOOK: query: INSERT OVERWRITE TABLE test_orc SELECT '' FROM src tablesample (10 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@test_orc @@ -41,11 +41,11 @@ POSTHOOK: Lineage: test_orc.key SIMPLE [] -PREHOOK: query: INSERT OVERWRITE TABLE test_orc SELECT IF (key % 3 = 0, key, '') FROM src limit 10 +PREHOOK: query: INSERT OVERWRITE TABLE test_orc SELECT IF (key % 3 = 0, key, '') FROM src tablesample (10 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@test_orc -POSTHOOK: query: INSERT OVERWRITE TABLE test_orc SELECT IF (key % 3 = 0, key, '') FROM src limit 10 +POSTHOOK: query: INSERT OVERWRITE TABLE test_orc SELECT IF (key % 3 = 0, key, '') FROM src tablesample (10 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@test_orc diff --git a/ql/src/test/results/clientpositive/partcols1.q.out b/ql/src/test/results/clientpositive/partcols1.q.out index 77f5b32..937ab3e 100644 --- a/ql/src/test/results/clientpositive/partcols1.q.out +++ b/ql/src/test/results/clientpositive/partcols1.q.out @@ -4,12 +4,12 @@ POSTHOOK: query: create table test1(col1 string) partitioned by (partitionId int POSTHOOK: type: CREATETABLE POSTHOOK: Output: default@test1 PREHOOK: query: insert overwrite table test1 partition (partitionId=1) - select key from src limit 10 + select key from src tablesample (10 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@test1@partitionid=1 POSTHOOK: query: insert overwrite table test1 partition (partitionId=1) - select key from src limit 10 + select key from src tablesample (10 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@test1@partitionid=1 diff --git a/ql/src/test/results/clientpositive/partition_date.q.out b/ql/src/test/results/clientpositive/partition_date.q.out index fa8113d..3462a1b 100644 --- a/ql/src/test/results/clientpositive/partition_date.q.out +++ b/ql/src/test/results/clientpositive/partition_date.q.out @@ -7,25 +7,25 @@ PREHOOK: type: CREATETABLE POSTHOOK: query: 
create table partition_date_1 (key string, value string) partitioned by (dt date, region int) POSTHOOK: type: CREATETABLE POSTHOOK: Output: default@partition_date_1 -PREHOOK: query: insert overwrite table partition_date_1 partition(dt='2000-01-01', region=1) - select * from src limit 10 +PREHOOK: query: insert overwrite table partition_date_1 partition(dt='2000-01-01', region=1) + select * from src tablesample (10 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@partition_date_1@dt=2000-01-01/region=1 -POSTHOOK: query: insert overwrite table partition_date_1 partition(dt='2000-01-01', region=1) - select * from src limit 10 +POSTHOOK: query: insert overwrite table partition_date_1 partition(dt='2000-01-01', region=1) + select * from src tablesample (10 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@partition_date_1@dt=2000-01-01/region=1 POSTHOOK: Lineage: partition_date_1 PARTITION(dt=2000-01-01,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: partition_date_1 PARTITION(dt=2000-01-01,region=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] -PREHOOK: query: insert overwrite table partition_date_1 partition(dt='2000-01-01', region=2) - select * from src limit 5 +PREHOOK: query: insert overwrite table partition_date_1 partition(dt='2000-01-01', region=2) + select * from src tablesample (5 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@partition_date_1@dt=2000-01-01/region=2 -POSTHOOK: query: insert overwrite table partition_date_1 partition(dt='2000-01-01', region=2) - select * from src limit 5 +POSTHOOK: query: insert overwrite table partition_date_1 partition(dt='2000-01-01', region=2) + select * from src tablesample (5 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@partition_date_1@dt=2000-01-01/region=2 @@ -34,12 +34,12 @@ POSTHOOK: Lineage: partition_date_1 PARTITION(dt=2000-01-01,region=1).value SIMP POSTHOOK: Lineage: partition_date_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: partition_date_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: insert overwrite table partition_date_1 partition(dt='2013-08-08', region=1) - select * from src limit 20 + select * from src tablesample (20 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@partition_date_1@dt=2013-08-08/region=1 POSTHOOK: query: insert overwrite table partition_date_1 partition(dt='2013-08-08', region=1) - select * from src limit 20 + select * from src tablesample (20 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@partition_date_1@dt=2013-08-08/region=1 @@ -50,12 +50,12 @@ POSTHOOK: Lineage: partition_date_1 PARTITION(dt=2000-01-01,region=2).value SIMP POSTHOOK: Lineage: partition_date_1 PARTITION(dt=2013-08-08,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: partition_date_1 PARTITION(dt=2013-08-08,region=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: insert overwrite table partition_date_1 partition(dt='2013-08-08', region=10) - select * from src limit 11 + select * from src tablesample (11 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: 
default@partition_date_1@dt=2013-08-08/region=10 POSTHOOK: query: insert overwrite table partition_date_1 partition(dt='2013-08-08', region=10) - select * from src limit 11 + select * from src tablesample (11 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@partition_date_1@dt=2013-08-08/region=10 diff --git a/ql/src/test/results/clientpositive/partition_date2.q.out b/ql/src/test/results/clientpositive/partition_date2.q.out index 0aa4d3a..1f220a5 100644 --- a/ql/src/test/results/clientpositive/partition_date2.q.out +++ b/ql/src/test/results/clientpositive/partition_date2.q.out @@ -8,7 +8,7 @@ POSTHOOK: query: create table partition_date2_1 (key string, value string) parti POSTHOOK: type: CREATETABLE POSTHOOK: Output: default@partition_date2_1 PREHOOK: query: -- test date literal syntax -from (select * from src limit 1) x +from (select * from src tablesample (1 rows)) x insert overwrite table partition_date2_1 partition(dt=date '2000-01-01', region=1) select * insert overwrite table partition_date2_1 partition(dt=date '2000-01-01', region=2) select * insert overwrite table partition_date2_1 partition(dt=date '1999-01-01', region=2) select * @@ -18,7 +18,7 @@ PREHOOK: Output: default@partition_date2_1@dt=1999-01-01/region=2 PREHOOK: Output: default@partition_date2_1@dt=2000-01-01/region=1 PREHOOK: Output: default@partition_date2_1@dt=2000-01-01/region=2 POSTHOOK: query: -- test date literal syntax -from (select * from src limit 1) x +from (select * from src tablesample (1 rows)) x insert overwrite table partition_date2_1 partition(dt=date '2000-01-01', region=1) select * insert overwrite table partition_date2_1 partition(dt=date '2000-01-01', region=2) select * insert overwrite table partition_date2_1 partition(dt=date '1999-01-01', region=2) select * @@ -80,13 +80,13 @@ POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIM 238 val_238 2000-01-01 2 PREHOOK: query: -- insert overwrite insert overwrite table partition_date2_1 partition(dt=date '2000-01-01', region=2) - select 'changed_key', 'changed_value' from src limit 2 + select 'changed_key', 'changed_value' from src tablesample (2 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@partition_date2_1@dt=2000-01-01/region=2 POSTHOOK: query: -- insert overwrite insert overwrite table partition_date2_1 partition(dt=date '2000-01-01', region=2) - select 'changed_key', 'changed_value' from src limit 2 + select 'changed_key', 'changed_value' from src tablesample (2 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@partition_date2_1@dt=2000-01-01/region=2 @@ -390,12 +390,12 @@ region int None #### A masked pattern was here #### PREHOOK: query: insert overwrite table partition_date2_1 partition(dt=date '1980-01-02', region=3) - select * from src limit 2 + select * from src tablesample (2 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@partition_date2_1@dt=1980-01-02/region=3 POSTHOOK: query: insert overwrite table partition_date2_1 partition(dt=date '1980-01-02', region=3) - select * from src limit 2 + select * from src tablesample (2 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@partition_date2_1@dt=1980-01-02/region=3 diff --git a/ql/src/test/results/clientpositive/partition_decode_name.q.out b/ql/src/test/results/clientpositive/partition_decode_name.q.out index 5fbd1cf..910e3ac 100644 --- a/ql/src/test/results/clientpositive/partition_decode_name.q.out +++ 
b/ql/src/test/results/clientpositive/partition_decode_name.q.out @@ -1,17 +1,17 @@ PREHOOK: query: create table sc as select * -from (select '2011-01-11', '2011-01-11+14:18:26' from src limit 1 +from (select '2011-01-11', '2011-01-11+14:18:26' from src tablesample (1 rows) union all - select '2011-01-11', '2011-01-11+15:18:26' from src limit 1 + select '2011-01-11', '2011-01-11+15:18:26' from src tablesample (1 rows) union all - select '2011-01-11', '2011-01-11+16:18:26' from src limit 1 ) s + select '2011-01-11', '2011-01-11+16:18:26' from src tablesample (1 rows) ) s PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: default@src POSTHOOK: query: create table sc as select * -from (select '2011-01-11', '2011-01-11+14:18:26' from src limit 1 +from (select '2011-01-11', '2011-01-11+14:18:26' from src tablesample (1 rows) union all - select '2011-01-11', '2011-01-11+15:18:26' from src limit 1 + select '2011-01-11', '2011-01-11+15:18:26' from src tablesample (1 rows) union all - select '2011-01-11', '2011-01-11+16:18:26' from src limit 1 ) s + select '2011-01-11', '2011-01-11+16:18:26' from src tablesample (1 rows) ) s POSTHOOK: type: CREATETABLE_AS_SELECT POSTHOOK: Input: default@src POSTHOOK: Output: default@sc diff --git a/ql/src/test/results/clientpositive/partition_special_char.q.out b/ql/src/test/results/clientpositive/partition_special_char.q.out index 5013fa0..8a8ee1b 100644 --- a/ql/src/test/results/clientpositive/partition_special_char.q.out +++ b/ql/src/test/results/clientpositive/partition_special_char.q.out @@ -1,17 +1,17 @@ PREHOOK: query: create table sc as select * -from (select '2011-01-11', '2011-01-11+14:18:26' from src limit 1 +from (select '2011-01-11', '2011-01-11+14:18:26' from src tablesample (1 rows) union all - select '2011-01-11', '2011-01-11+15:18:26' from src limit 1 + select '2011-01-11', '2011-01-11+15:18:26' from src tablesample (1 rows) union all - select '2011-01-11', '2011-01-11+16:18:26' from src limit 1 ) s + select '2011-01-11', '2011-01-11+16:18:26' from src tablesample (1 rows) ) s PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: default@src POSTHOOK: query: create table sc as select * -from (select '2011-01-11', '2011-01-11+14:18:26' from src limit 1 +from (select '2011-01-11', '2011-01-11+14:18:26' from src tablesample (1 rows) union all - select '2011-01-11', '2011-01-11+15:18:26' from src limit 1 + select '2011-01-11', '2011-01-11+15:18:26' from src tablesample (1 rows) union all - select '2011-01-11', '2011-01-11+16:18:26' from src limit 1 ) s + select '2011-01-11', '2011-01-11+16:18:26' from src tablesample (1 rows) ) s POSTHOOK: type: CREATETABLE_AS_SELECT POSTHOOK: Input: default@src POSTHOOK: Output: default@sc diff --git a/ql/src/test/results/clientpositive/partition_varchar1.q.out b/ql/src/test/results/clientpositive/partition_varchar1.q.out index e7fef01..fea6561 100644 --- a/ql/src/test/results/clientpositive/partition_varchar1.q.out +++ b/ql/src/test/results/clientpositive/partition_varchar1.q.out @@ -8,24 +8,24 @@ POSTHOOK: query: create table partition_varchar_1 (key string, value varchar(20) POSTHOOK: type: CREATETABLE POSTHOOK: Output: default@partition_varchar_1 PREHOOK: query: insert overwrite table partition_varchar_1 partition(dt='2000-01-01', region=1) - select * from src limit 10 + select * from src tablesample (10 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@partition_varchar_1@dt=2000-01-01/region=1 POSTHOOK: query: insert overwrite table partition_varchar_1 partition(dt='2000-01-01', 
region=1) - select * from src limit 10 + select * from src tablesample (10 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@partition_varchar_1@dt=2000-01-01/region=1 POSTHOOK: Lineage: partition_varchar_1 PARTITION(dt=2000-01-01,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: partition_varchar_1 PARTITION(dt=2000-01-01,region=1).value EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: insert overwrite table partition_varchar_1 partition(dt='2000-01-01', region=2) - select * from src limit 5 + select * from src tablesample (5 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@partition_varchar_1@dt=2000-01-01/region=2 POSTHOOK: query: insert overwrite table partition_varchar_1 partition(dt='2000-01-01', region=2) - select * from src limit 5 + select * from src tablesample (5 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@partition_varchar_1@dt=2000-01-01/region=2 @@ -34,12 +34,12 @@ POSTHOOK: Lineage: partition_varchar_1 PARTITION(dt=2000-01-01,region=1).value E POSTHOOK: Lineage: partition_varchar_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: partition_varchar_1 PARTITION(dt=2000-01-01,region=2).value EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: insert overwrite table partition_varchar_1 partition(dt='2013-08-08', region=1) - select * from src limit 20 + select * from src tablesample (20 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@partition_varchar_1@dt=2013-08-08/region=1 POSTHOOK: query: insert overwrite table partition_varchar_1 partition(dt='2013-08-08', region=1) - select * from src limit 20 + select * from src tablesample (20 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@partition_varchar_1@dt=2013-08-08/region=1 @@ -50,12 +50,12 @@ POSTHOOK: Lineage: partition_varchar_1 PARTITION(dt=2000-01-01,region=2).value E POSTHOOK: Lineage: partition_varchar_1 PARTITION(dt=2013-08-08,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: partition_varchar_1 PARTITION(dt=2013-08-08,region=1).value EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: insert overwrite table partition_varchar_1 partition(dt='2013-08-08', region=10) - select * from src limit 11 + select * from src tablesample (11 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@partition_varchar_1@dt=2013-08-08/region=10 POSTHOOK: query: insert overwrite table partition_varchar_1 partition(dt='2013-08-08', region=10) - select * from src limit 11 + select * from src tablesample (11 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@partition_varchar_1@dt=2013-08-08/region=10 diff --git a/ql/src/test/results/clientpositive/ppd_union_view.q.out b/ql/src/test/results/clientpositive/ppd_union_view.q.out index 90be396..a1aa2fc 100644 --- a/ql/src/test/results/clientpositive/ppd_union_view.q.out +++ b/ql/src/test/results/clientpositive/ppd_union_view.q.out @@ -12,24 +12,24 @@ POSTHOOK: query: create table t1_new (key string, value string) partitioned by ( POSTHOOK: type: CREATETABLE POSTHOOK: Output: default@t1_new PREHOOK: query: insert overwrite table t1_new partition (ds = 
'2011-10-15') -select 'key1', 'value1' from src limit 1 +select 'key1', 'value1' from src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@t1_new@ds=2011-10-15 POSTHOOK: query: insert overwrite table t1_new partition (ds = '2011-10-15') -select 'key1', 'value1' from src limit 1 +select 'key1', 'value1' from src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@t1_new@ds=2011-10-15 POSTHOOK: Lineage: t1_new PARTITION(ds=2011-10-15).key SIMPLE [] POSTHOOK: Lineage: t1_new PARTITION(ds=2011-10-15).value SIMPLE [] PREHOOK: query: insert overwrite table t1_new partition (ds = '2011-10-16') -select 'key2', 'value2' from src limit 1 +select 'key2', 'value2' from src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@t1_new@ds=2011-10-16 POSTHOOK: query: insert overwrite table t1_new partition (ds = '2011-10-16') -select 'key2', 'value2' from src limit 1 +select 'key2', 'value2' from src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@t1_new@ds=2011-10-16 @@ -47,12 +47,12 @@ POSTHOOK: Lineage: t1_new PARTITION(ds=2011-10-15).value SIMPLE [] POSTHOOK: Lineage: t1_new PARTITION(ds=2011-10-16).key SIMPLE [] POSTHOOK: Lineage: t1_new PARTITION(ds=2011-10-16).value SIMPLE [] PREHOOK: query: insert overwrite table t1_old partition (ds = '2011-10-13') -select 'keymap3', 'value3' from src limit 1 +select 'keymap3', 'value3' from src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@t1_old@ds=2011-10-13 POSTHOOK: query: insert overwrite table t1_old partition (ds = '2011-10-13') -select 'keymap3', 'value3' from src limit 1 +select 'keymap3', 'value3' from src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@t1_old@ds=2011-10-13 @@ -63,12 +63,12 @@ POSTHOOK: Lineage: t1_new PARTITION(ds=2011-10-16).value SIMPLE [] POSTHOOK: Lineage: t1_old PARTITION(ds=2011-10-13).keymap SIMPLE [] POSTHOOK: Lineage: t1_old PARTITION(ds=2011-10-13).value SIMPLE [] PREHOOK: query: insert overwrite table t1_old partition (ds = '2011-10-14') -select 'keymap4', 'value4' from src limit 1 +select 'keymap4', 'value4' from src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@t1_old@ds=2011-10-14 POSTHOOK: query: insert overwrite table t1_old partition (ds = '2011-10-14') -select 'keymap4', 'value4' from src limit 1 +select 'keymap4', 'value4' from src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@t1_old@ds=2011-10-14 @@ -94,12 +94,12 @@ POSTHOOK: Lineage: t1_old PARTITION(ds=2011-10-13).value SIMPLE [] POSTHOOK: Lineage: t1_old PARTITION(ds=2011-10-14).keymap SIMPLE [] POSTHOOK: Lineage: t1_old PARTITION(ds=2011-10-14).value SIMPLE [] PREHOOK: query: insert overwrite table t1_mapping partition (ds = '2011-10-13') -select 'key3', 'keymap3' from src limit 1 +select 'key3', 'keymap3' from src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@t1_mapping@ds=2011-10-13 POSTHOOK: query: insert overwrite table t1_mapping partition (ds = '2011-10-13') -select 'key3', 'keymap3' from src limit 1 +select 'key3', 'keymap3' from src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@t1_mapping@ds=2011-10-13 @@ -114,12 +114,12 @@ POSTHOOK: Lineage: t1_old 
PARTITION(ds=2011-10-13).value SIMPLE [] POSTHOOK: Lineage: t1_old PARTITION(ds=2011-10-14).keymap SIMPLE [] POSTHOOK: Lineage: t1_old PARTITION(ds=2011-10-14).value SIMPLE [] PREHOOK: query: insert overwrite table t1_mapping partition (ds = '2011-10-14') -select 'key4', 'keymap4' from src limit 1 +select 'key4', 'keymap4' from src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@t1_mapping@ds=2011-10-14 POSTHOOK: query: insert overwrite table t1_mapping partition (ds = '2011-10-14') -select 'key4', 'keymap4' from src limit 1 +select 'key4', 'keymap4' from src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@t1_mapping@ds=2011-10-14 diff --git a/ql/src/test/results/clientpositive/ppr_pushdown.q.out b/ql/src/test/results/clientpositive/ppr_pushdown.q.out index 7eb85a3..57f2f30 100644 --- a/ql/src/test/results/clientpositive/ppr_pushdown.q.out +++ b/ql/src/test/results/clientpositive/ppr_pushdown.q.out @@ -59,41 +59,41 @@ POSTHOOK: query: alter table ppr_test add partition (ds = '12*4') POSTHOOK: type: ALTERTABLE_ADDPARTS POSTHOOK: Input: default@ppr_test POSTHOOK: Output: default@ppr_test@ds=12%2A4 -PREHOOK: query: insert overwrite table ppr_test partition(ds = '1234') select * from (select '1234' from src limit 1 union all select 'abcd' from src limit 1) s +PREHOOK: query: insert overwrite table ppr_test partition(ds = '1234') select * from (select '1234' from src tablesample (1 rows) union all select 'abcd' from src tablesample (1 rows)) s PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@ppr_test@ds=1234 -POSTHOOK: query: insert overwrite table ppr_test partition(ds = '1234') select * from (select '1234' from src limit 1 union all select 'abcd' from src limit 1) s +POSTHOOK: query: insert overwrite table ppr_test partition(ds = '1234') select * from (select '1234' from src tablesample (1 rows) union all select 'abcd' from src tablesample (1 rows)) s POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@ppr_test@ds=1234 POSTHOOK: Lineage: ppr_test PARTITION(ds=1234).key EXPRESSION [] -PREHOOK: query: insert overwrite table ppr_test partition(ds = '1224') select * from (select '1224' from src limit 1 union all select 'abcd' from src limit 1) s +PREHOOK: query: insert overwrite table ppr_test partition(ds = '1224') select * from (select '1224' from src tablesample (1 rows) union all select 'abcd' from src tablesample (1 rows)) s PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@ppr_test@ds=1224 -POSTHOOK: query: insert overwrite table ppr_test partition(ds = '1224') select * from (select '1224' from src limit 1 union all select 'abcd' from src limit 1) s +POSTHOOK: query: insert overwrite table ppr_test partition(ds = '1224') select * from (select '1224' from src tablesample (1 rows) union all select 'abcd' from src tablesample (1 rows)) s POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@ppr_test@ds=1224 POSTHOOK: Lineage: ppr_test PARTITION(ds=1224).key EXPRESSION [] POSTHOOK: Lineage: ppr_test PARTITION(ds=1234).key EXPRESSION [] -PREHOOK: query: insert overwrite table ppr_test partition(ds = '1214') select * from (select '1214' from src limit 1 union all select 'abcd' from src limit 1) s +PREHOOK: query: insert overwrite table ppr_test partition(ds = '1214') select * from (select '1214' from src tablesample (1 rows) union all select 'abcd' from src tablesample (1 rows)) s PREHOOK: type: QUERY 
PREHOOK: Input: default@src PREHOOK: Output: default@ppr_test@ds=1214 -POSTHOOK: query: insert overwrite table ppr_test partition(ds = '1214') select * from (select '1214' from src limit 1 union all select 'abcd' from src limit 1) s +POSTHOOK: query: insert overwrite table ppr_test partition(ds = '1214') select * from (select '1214' from src tablesample (1 rows) union all select 'abcd' from src tablesample (1 rows)) s POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@ppr_test@ds=1214 POSTHOOK: Lineage: ppr_test PARTITION(ds=1214).key EXPRESSION [] POSTHOOK: Lineage: ppr_test PARTITION(ds=1224).key EXPRESSION [] POSTHOOK: Lineage: ppr_test PARTITION(ds=1234).key EXPRESSION [] -PREHOOK: query: insert overwrite table ppr_test partition(ds = '12+4') select * from (select '12+4' from src limit 1 union all select 'abcd' from src limit 1) s +PREHOOK: query: insert overwrite table ppr_test partition(ds = '12+4') select * from (select '12+4' from src tablesample (1 rows) union all select 'abcd' from src tablesample (1 rows)) s PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@ppr_test@ds=12+4 -POSTHOOK: query: insert overwrite table ppr_test partition(ds = '12+4') select * from (select '12+4' from src limit 1 union all select 'abcd' from src limit 1) s +POSTHOOK: query: insert overwrite table ppr_test partition(ds = '12+4') select * from (select '12+4' from src tablesample (1 rows) union all select 'abcd' from src tablesample (1 rows)) s POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@ppr_test@ds=12+4 @@ -101,11 +101,11 @@ POSTHOOK: Lineage: ppr_test PARTITION(ds=12+4).key EXPRESSION [] POSTHOOK: Lineage: ppr_test PARTITION(ds=1214).key EXPRESSION [] POSTHOOK: Lineage: ppr_test PARTITION(ds=1224).key EXPRESSION [] POSTHOOK: Lineage: ppr_test PARTITION(ds=1234).key EXPRESSION [] -PREHOOK: query: insert overwrite table ppr_test partition(ds = '12.4') select * from (select '12.4' from src limit 1 union all select 'abcd' from src limit 1) s +PREHOOK: query: insert overwrite table ppr_test partition(ds = '12.4') select * from (select '12.4' from src tablesample (1 rows) union all select 'abcd' from src tablesample (1 rows)) s PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@ppr_test@ds=12.4 -POSTHOOK: query: insert overwrite table ppr_test partition(ds = '12.4') select * from (select '12.4' from src limit 1 union all select 'abcd' from src limit 1) s +POSTHOOK: query: insert overwrite table ppr_test partition(ds = '12.4') select * from (select '12.4' from src tablesample (1 rows) union all select 'abcd' from src tablesample (1 rows)) s POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@ppr_test@ds=12.4 @@ -114,11 +114,11 @@ POSTHOOK: Lineage: ppr_test PARTITION(ds=12.4).key EXPRESSION [] POSTHOOK: Lineage: ppr_test PARTITION(ds=1214).key EXPRESSION [] POSTHOOK: Lineage: ppr_test PARTITION(ds=1224).key EXPRESSION [] POSTHOOK: Lineage: ppr_test PARTITION(ds=1234).key EXPRESSION [] -PREHOOK: query: insert overwrite table ppr_test partition(ds = '12:4') select * from (select '12:4' from src limit 1 union all select 'abcd' from src limit 1) s +PREHOOK: query: insert overwrite table ppr_test partition(ds = '12:4') select * from (select '12:4' from src tablesample (1 rows) union all select 'abcd' from src tablesample (1 rows)) s PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@ppr_test@ds=12%3A4 -POSTHOOK: query: insert overwrite table ppr_test partition(ds 
= '12:4') select * from (select '12:4' from src limit 1 union all select 'abcd' from src limit 1) s +POSTHOOK: query: insert overwrite table ppr_test partition(ds = '12:4') select * from (select '12:4' from src tablesample (1 rows) union all select 'abcd' from src tablesample (1 rows)) s POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@ppr_test@ds=12%3A4 @@ -128,11 +128,11 @@ POSTHOOK: Lineage: ppr_test PARTITION(ds=1214).key EXPRESSION [] POSTHOOK: Lineage: ppr_test PARTITION(ds=1224).key EXPRESSION [] POSTHOOK: Lineage: ppr_test PARTITION(ds=1234).key EXPRESSION [] POSTHOOK: Lineage: ppr_test PARTITION(ds=12:4).key EXPRESSION [] -PREHOOK: query: insert overwrite table ppr_test partition(ds = '12%4') select * from (select '12%4' from src limit 1 union all select 'abcd' from src limit 1) s +PREHOOK: query: insert overwrite table ppr_test partition(ds = '12%4') select * from (select '12%4' from src tablesample (1 rows) union all select 'abcd' from src tablesample (1 rows)) s PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@ppr_test@ds=12%254 -POSTHOOK: query: insert overwrite table ppr_test partition(ds = '12%4') select * from (select '12%4' from src limit 1 union all select 'abcd' from src limit 1) s +POSTHOOK: query: insert overwrite table ppr_test partition(ds = '12%4') select * from (select '12%4' from src tablesample (1 rows) union all select 'abcd' from src tablesample (1 rows)) s POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@ppr_test@ds=12%254 @@ -143,11 +143,11 @@ POSTHOOK: Lineage: ppr_test PARTITION(ds=1214).key EXPRESSION [] POSTHOOK: Lineage: ppr_test PARTITION(ds=1224).key EXPRESSION [] POSTHOOK: Lineage: ppr_test PARTITION(ds=1234).key EXPRESSION [] POSTHOOK: Lineage: ppr_test PARTITION(ds=12:4).key EXPRESSION [] -PREHOOK: query: insert overwrite table ppr_test partition(ds = '12*4') select * from (select '12*4' from src limit 1 union all select 'abcd' from src limit 1) s +PREHOOK: query: insert overwrite table ppr_test partition(ds = '12*4') select * from (select '12*4' from src tablesample (1 rows) union all select 'abcd' from src tablesample (1 rows)) s PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@ppr_test@ds=12%2A4 -POSTHOOK: query: insert overwrite table ppr_test partition(ds = '12*4') select * from (select '12*4' from src limit 1 union all select 'abcd' from src limit 1) s +POSTHOOK: query: insert overwrite table ppr_test partition(ds = '12*4') select * from (select '12*4' from src tablesample (1 rows) union all select 'abcd' from src tablesample (1 rows)) s POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@ppr_test@ds=12%2A4 diff --git a/ql/src/test/results/clientpositive/ppr_pushdown2.q.out b/ql/src/test/results/clientpositive/ppr_pushdown2.q.out index aef4e4e..ecd26d3 100644 --- a/ql/src/test/results/clientpositive/ppr_pushdown2.q.out +++ b/ql/src/test/results/clientpositive/ppr_pushdown2.q.out @@ -3,20 +3,20 @@ PREHOOK: type: CREATETABLE POSTHOOK: query: create table ppr_test (key string) partitioned by (ds string) POSTHOOK: type: CREATETABLE POSTHOOK: Output: default@ppr_test -PREHOOK: query: insert overwrite table ppr_test partition(ds='2') select '2' from src limit 1 +PREHOOK: query: insert overwrite table ppr_test partition(ds='2') select '2' from src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@ppr_test@ds=2 -POSTHOOK: query: insert overwrite table ppr_test partition(ds='2') 
select '2' from src limit 1 +POSTHOOK: query: insert overwrite table ppr_test partition(ds='2') select '2' from src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@ppr_test@ds=2 POSTHOOK: Lineage: ppr_test PARTITION(ds=2).key SIMPLE [] -PREHOOK: query: insert overwrite table ppr_test partition(ds='22') select '22' from src limit 1 +PREHOOK: query: insert overwrite table ppr_test partition(ds='22') select '22' from src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@ppr_test@ds=22 -POSTHOOK: query: insert overwrite table ppr_test partition(ds='22') select '22' from src limit 1 +POSTHOOK: query: insert overwrite table ppr_test partition(ds='22') select '22' from src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@ppr_test@ds=22 @@ -55,22 +55,22 @@ POSTHOOK: type: CREATETABLE POSTHOOK: Output: default@ppr_test2 POSTHOOK: Lineage: ppr_test PARTITION(ds=22).key SIMPLE [] POSTHOOK: Lineage: ppr_test PARTITION(ds=2).key SIMPLE [] -PREHOOK: query: insert overwrite table ppr_test2 partition(ds='1', s='2') select '1' from src limit 1 +PREHOOK: query: insert overwrite table ppr_test2 partition(ds='1', s='2') select '1' from src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@ppr_test2@ds=1/s=2 -POSTHOOK: query: insert overwrite table ppr_test2 partition(ds='1', s='2') select '1' from src limit 1 +POSTHOOK: query: insert overwrite table ppr_test2 partition(ds='1', s='2') select '1' from src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@ppr_test2@ds=1/s=2 POSTHOOK: Lineage: ppr_test PARTITION(ds=22).key SIMPLE [] POSTHOOK: Lineage: ppr_test PARTITION(ds=2).key SIMPLE [] POSTHOOK: Lineage: ppr_test2 PARTITION(ds=1,s=2).key SIMPLE [] -PREHOOK: query: insert overwrite table ppr_test2 partition(ds='2', s='1') select '2' from src limit 1 +PREHOOK: query: insert overwrite table ppr_test2 partition(ds='2', s='1') select '2' from src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@ppr_test2@ds=2/s=1 -POSTHOOK: query: insert overwrite table ppr_test2 partition(ds='2', s='1') select '2' from src limit 1 +POSTHOOK: query: insert overwrite table ppr_test2 partition(ds='2', s='1') select '2' from src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@ppr_test2@ds=2/s=1 @@ -117,11 +117,11 @@ POSTHOOK: Lineage: ppr_test PARTITION(ds=22).key SIMPLE [] POSTHOOK: Lineage: ppr_test PARTITION(ds=2).key SIMPLE [] POSTHOOK: Lineage: ppr_test2 PARTITION(ds=1,s=2).key SIMPLE [] POSTHOOK: Lineage: ppr_test2 PARTITION(ds=2,s=1).key SIMPLE [] -PREHOOK: query: insert overwrite table ppr_test3 partition(col='1', ol='2', l = '3') select '1' from src limit 1 +PREHOOK: query: insert overwrite table ppr_test3 partition(col='1', ol='2', l = '3') select '1' from src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@ppr_test3@col=1/ol=2/l=3 -POSTHOOK: query: insert overwrite table ppr_test3 partition(col='1', ol='2', l = '3') select '1' from src limit 1 +POSTHOOK: query: insert overwrite table ppr_test3 partition(col='1', ol='2', l = '3') select '1' from src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@ppr_test3@col=1/ol=2/l=3 @@ -130,11 +130,11 @@ POSTHOOK: Lineage: ppr_test PARTITION(ds=2).key SIMPLE [] 
POSTHOOK: Lineage: ppr_test2 PARTITION(ds=1,s=2).key SIMPLE [] POSTHOOK: Lineage: ppr_test2 PARTITION(ds=2,s=1).key SIMPLE [] POSTHOOK: Lineage: ppr_test3 PARTITION(col=1,ol=2,l=3).key SIMPLE [] -PREHOOK: query: insert overwrite table ppr_test3 partition(col='1', ol='1', l = '2') select '2' from src limit 1 +PREHOOK: query: insert overwrite table ppr_test3 partition(col='1', ol='1', l = '2') select '2' from src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@ppr_test3@col=1/ol=1/l=2 -POSTHOOK: query: insert overwrite table ppr_test3 partition(col='1', ol='1', l = '2') select '2' from src limit 1 +POSTHOOK: query: insert overwrite table ppr_test3 partition(col='1', ol='1', l = '2') select '2' from src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@ppr_test3@col=1/ol=1/l=2 @@ -144,11 +144,11 @@ POSTHOOK: Lineage: ppr_test2 PARTITION(ds=1,s=2).key SIMPLE [] POSTHOOK: Lineage: ppr_test2 PARTITION(ds=2,s=1).key SIMPLE [] POSTHOOK: Lineage: ppr_test3 PARTITION(col=1,ol=1,l=2).key SIMPLE [] POSTHOOK: Lineage: ppr_test3 PARTITION(col=1,ol=2,l=3).key SIMPLE [] -PREHOOK: query: insert overwrite table ppr_test3 partition(col='1', ol='2', l = '1') select '3' from src limit 1 +PREHOOK: query: insert overwrite table ppr_test3 partition(col='1', ol='2', l = '1') select '3' from src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@ppr_test3@col=1/ol=2/l=1 -POSTHOOK: query: insert overwrite table ppr_test3 partition(col='1', ol='2', l = '1') select '3' from src limit 1 +POSTHOOK: query: insert overwrite table ppr_test3 partition(col='1', ol='2', l = '1') select '3' from src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@ppr_test3@col=1/ol=2/l=1 diff --git a/ql/src/test/results/clientpositive/quote2.q.out b/ql/src/test/results/clientpositive/quote2.q.out index 220100f..a19de4a 100644 --- a/ql/src/test/results/clientpositive/quote2.q.out +++ b/ql/src/test/results/clientpositive/quote2.q.out @@ -32,69 +32,58 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR 'abc') (TOK_SELEXPR "abc") (TOK_SELEXPR 'abc\'') (TOK_SELEXPR "abc\"") (TOK_SELEXPR 'abc\\') (TOK_SELEXPR "abc\\") (TOK_SELEXPR 'abc\\\'') (TOK_SELEXPR "abc\\\"") (TOK_SELEXPR 'abc\\\\') (TOK_SELEXPR "abc\\\\") (TOK_SELEXPR 'abc\\\\\'') (TOK_SELEXPR "abc\\\\\"") (TOK_SELEXPR 'abc\\\\\\') (TOK_SELEXPR "abc\\\\\\") (TOK_SELEXPR 'abc""""\\') (TOK_SELEXPR "abc''''\\") (TOK_SELEXPR "awk '{print NR\"\\t\"$0}'") (TOK_SELEXPR 'tab\ttab') (TOK_SELEXPR "tab\ttab")) (TOK_LIMIT 1))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - src - TableScan - alias: src - Select Operator - expressions: - expr: 'abc' - type: string - expr: 'abc' - type: string - expr: 'abc'' - type: string - expr: 'abc"' - type: string - expr: 'abc\' - type: string - expr: 'abc\' - type: string - expr: 'abc\'' - type: string - expr: 'abc\"' - type: string - expr: 'abc\\' - type: string - expr: 'abc\\' - type: string - expr: 'abc\\'' - type: string - expr: 'abc\\"' - type: string - expr: 'abc\\\' - type: string - expr: 'abc\\\' - type: string - expr: 'abc""""\' - type: string - expr: 'abc''''\' - type: string - expr: 'awk '{print NR"\t"$0}'' - type: string - expr: 'tab tab' - type: string - expr: 'tab tab' - 
type: string - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: 1 + Processor Tree: + TableScan + alias: src + Select Operator + expressions: + expr: 'abc' + type: string + expr: 'abc' + type: string + expr: 'abc'' + type: string + expr: 'abc"' + type: string + expr: 'abc\' + type: string + expr: 'abc\' + type: string + expr: 'abc\'' + type: string + expr: 'abc\"' + type: string + expr: 'abc\\' + type: string + expr: 'abc\\' + type: string + expr: 'abc\\'' + type: string + expr: 'abc\\"' + type: string + expr: 'abc\\\' + type: string + expr: 'abc\\\' + type: string + expr: 'abc""""\' + type: string + expr: 'abc''''\' + type: string + expr: 'awk '{print NR"\t"$0}'' + type: string + expr: 'tab tab' + type: string + expr: 'tab tab' + type: string + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18 + Limit + ListSink PREHOOK: query: SELECT diff --git a/ql/src/test/results/clientpositive/str_to_map.q.out b/ql/src/test/results/clientpositive/str_to_map.q.out index 3a50ab4..6882fc7 100644 --- a/ql/src/test/results/clientpositive/str_to_map.q.out +++ b/ql/src/test/results/clientpositive/str_to_map.q.out @@ -17,33 +17,22 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR ([ (TOK_FUNCTION str_to_map 'a=1,b=2,c=3' ',' '=') 'a'))) (TOK_LIMIT 3))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - src - TableScan - alias: src - Select Operator - expressions: - expr: str_to_map('a=1,b=2,c=3',',','=')['a'] - type: string - outputColumnNames: _col0 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: 3 + Processor Tree: + TableScan + alias: src + Select Operator + expressions: + expr: str_to_map('a=1,b=2,c=3',',','=')['a'] + type: string + outputColumnNames: _col0 + Limit + ListSink PREHOOK: query: select str_to_map('a=1,b=2,c=3',',','=')['a'] from src limit 3 @@ -65,33 +54,22 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION str_to_map 'a:1,b:2,c:3'))) (TOK_LIMIT 3))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - src - TableScan - alias: src - Select Operator - expressions: - expr: str_to_map('a:1,b:2,c:3') - type: map<string,string> - outputColumnNames: _col0 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - 
Stage: Stage-0 Fetch Operator limit: 3 + Processor Tree: + TableScan + alias: src + Select Operator + expressions: + expr: str_to_map('a:1,b:2,c:3') + type: map<string,string> + outputColumnNames: _col0 + Limit + ListSink PREHOOK: query: select str_to_map('a:1,b:2,c:3') from src limit 3 @@ -113,33 +91,22 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION str_to_map 'a:1,b:2,c:3' ',' ':'))) (TOK_LIMIT 3))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - src - TableScan - alias: src - Select Operator - expressions: - expr: str_to_map('a:1,b:2,c:3',',',':') - type: map<string,string> - outputColumnNames: _col0 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: 3 + Processor Tree: + TableScan + alias: src + Select Operator + expressions: + expr: str_to_map('a:1,b:2,c:3',',',':') + type: map<string,string> + outputColumnNames: _col0 + Limit + ListSink PREHOOK: query: select str_to_map('a:1,b:2,c:3',',',':') from src limit 3 @@ -224,10 +191,10 @@ PREHOOK: query: drop table tbl_s2m PREHOOK: type: DROPTABLE POSTHOOK: query: drop table tbl_s2m POSTHOOK: type: DROPTABLE -PREHOOK: query: create table tbl_s2m as select 'ABC=CC_333=444' as t from src limit 3 +PREHOOK: query: create table tbl_s2m as select 'ABC=CC_333=444' as t from src tablesample (3 rows) PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: default@src -POSTHOOK: query: create table tbl_s2m as select 'ABC=CC_333=444' as t from src limit 3 +POSTHOOK: query: create table tbl_s2m as select 'ABC=CC_333=444' as t from src tablesample (3 rows) POSTHOOK: type: CREATETABLE_AS_SELECT POSTHOOK: Input: default@src POSTHOOK: Output: default@tbl_s2m diff --git a/ql/src/test/results/clientpositive/test_boolean_whereclause.q.out b/ql/src/test/results/clientpositive/test_boolean_whereclause.q.out index c8ea5cd..9bb9446 100644 --- a/ql/src/test/results/clientpositive/test_boolean_whereclause.q.out +++ b/ql/src/test/results/clientpositive/test_boolean_whereclause.q.out @@ -3,11 +3,11 @@ PREHOOK: type: CREATETABLE POSTHOOK: query: create table if not exists test_boolean(dummy tinyint) POSTHOOK: type: CREATETABLE POSTHOOK: Output: default@test_boolean -PREHOOK: query: insert overwrite table test_boolean select 1 from src limit 1 +PREHOOK: query: insert overwrite table test_boolean select 1 from src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@test_boolean -POSTHOOK: query: insert overwrite table test_boolean select 1 from src limit 1 +POSTHOOK: query: insert overwrite table test_boolean select 1 from src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@test_boolean diff --git a/ql/src/test/results/clientpositive/timestamp_1.q.out b/ql/src/test/results/clientpositive/timestamp_1.q.out index cf62457..3768ec2 100644 --- a/ql/src/test/results/clientpositive/timestamp_1.q.out +++ b/ql/src/test/results/clientpositive/timestamp_1.q.out @@ -15,13 +15,13 @@ POSTHOOK: query: alter table timestamp_1 set serde 'org.apache.hadoop.hive.serde POSTHOOK: type: ALTERTABLE_SERIALIZER POSTHOOK: Input: default@timestamp_1 POSTHOOK: Output: 
default@timestamp_1 -PREHOOK: query: insert overwrite table timestamp_1 - select cast('2011-01-01 01:01:01' as timestamp) from src limit 1 +PREHOOK: query: insert overwrite table timestamp_1 + select cast('2011-01-01 01:01:01' as timestamp) from src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@timestamp_1 -POSTHOOK: query: insert overwrite table timestamp_1 - select cast('2011-01-01 01:01:01' as timestamp) from src limit 1 +POSTHOOK: query: insert overwrite table timestamp_1 + select cast('2011-01-01 01:01:01' as timestamp) from src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@timestamp_1 @@ -107,12 +107,12 @@ POSTHOOK: Input: default@timestamp_1 POSTHOOK: Lineage: timestamp_1.t EXPRESSION [] 2011-01-01 01:01:01 PREHOOK: query: insert overwrite table timestamp_1 - select '2011-01-01 01:01:01' from src limit 1 + select '2011-01-01 01:01:01' from src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@timestamp_1 POSTHOOK: query: insert overwrite table timestamp_1 - select '2011-01-01 01:01:01' from src limit 1 + select '2011-01-01 01:01:01' from src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@timestamp_1 @@ -207,12 +207,12 @@ POSTHOOK: Lineage: timestamp_1.t EXPRESSION [] POSTHOOK: Lineage: timestamp_1.t EXPRESSION [] 2011-01-01 01:01:01 PREHOOK: query: insert overwrite table timestamp_1 - select '2011-01-01 01:01:01.1' from src limit 1 + select '2011-01-01 01:01:01.1' from src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@timestamp_1 POSTHOOK: query: insert overwrite table timestamp_1 - select '2011-01-01 01:01:01.1' from src limit 1 + select '2011-01-01 01:01:01.1' from src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@timestamp_1 @@ -316,12 +316,12 @@ POSTHOOK: Lineage: timestamp_1.t EXPRESSION [] POSTHOOK: Lineage: timestamp_1.t EXPRESSION [] 2011-01-01 01:01:01.1 PREHOOK: query: insert overwrite table timestamp_1 - select '2011-01-01 01:01:01.0001' from src limit 1 + select '2011-01-01 01:01:01.0001' from src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@timestamp_1 POSTHOOK: query: insert overwrite table timestamp_1 - select '2011-01-01 01:01:01.0001' from src limit 1 + select '2011-01-01 01:01:01.0001' from src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@timestamp_1 @@ -434,12 +434,12 @@ POSTHOOK: Lineage: timestamp_1.t EXPRESSION [] POSTHOOK: Lineage: timestamp_1.t EXPRESSION [] 2011-01-01 01:01:01.0001 PREHOOK: query: insert overwrite table timestamp_1 - select '2011-01-01 01:01:01.000100000' from src limit 1 + select '2011-01-01 01:01:01.000100000' from src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@timestamp_1 POSTHOOK: query: insert overwrite table timestamp_1 - select '2011-01-01 01:01:01.000100000' from src limit 1 + select '2011-01-01 01:01:01.000100000' from src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@timestamp_1 @@ -561,12 +561,12 @@ POSTHOOK: Lineage: timestamp_1.t EXPRESSION [] POSTHOOK: Lineage: timestamp_1.t EXPRESSION [] 2011-01-01 01:01:01.0001 PREHOOK: query: insert overwrite table timestamp_1 - select '2011-01-01 01:01:01.001000011' from src limit 1 + select 
'2011-01-01 01:01:01.001000011' from src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@timestamp_1 POSTHOOK: query: insert overwrite table timestamp_1 - select '2011-01-01 01:01:01.001000011' from src limit 1 + select '2011-01-01 01:01:01.001000011' from src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@timestamp_1 diff --git a/ql/src/test/results/clientpositive/timestamp_2.q.out b/ql/src/test/results/clientpositive/timestamp_2.q.out index 1422bac..8648b87 100644 --- a/ql/src/test/results/clientpositive/timestamp_2.q.out +++ b/ql/src/test/results/clientpositive/timestamp_2.q.out @@ -15,13 +15,13 @@ POSTHOOK: query: alter table timestamp_2 set serde 'org.apache.hadoop.hive.serde POSTHOOK: type: ALTERTABLE_SERIALIZER POSTHOOK: Input: default@timestamp_2 POSTHOOK: Output: default@timestamp_2 -PREHOOK: query: insert overwrite table timestamp_2 - select cast('2011-01-01 01:01:01' as timestamp) from src limit 1 +PREHOOK: query: insert overwrite table timestamp_2 + select cast('2011-01-01 01:01:01' as timestamp) from src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@timestamp_2 -POSTHOOK: query: insert overwrite table timestamp_2 - select cast('2011-01-01 01:01:01' as timestamp) from src limit 1 +POSTHOOK: query: insert overwrite table timestamp_2 + select cast('2011-01-01 01:01:01' as timestamp) from src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@timestamp_2 @@ -107,12 +107,12 @@ POSTHOOK: Input: default@timestamp_2 POSTHOOK: Lineage: timestamp_2.t EXPRESSION [] 2011-01-01 01:01:01 PREHOOK: query: insert overwrite table timestamp_2 - select '2011-01-01 01:01:01' from src limit 1 + select '2011-01-01 01:01:01' from src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@timestamp_2 POSTHOOK: query: insert overwrite table timestamp_2 - select '2011-01-01 01:01:01' from src limit 1 + select '2011-01-01 01:01:01' from src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@timestamp_2 @@ -207,12 +207,12 @@ POSTHOOK: Lineage: timestamp_2.t EXPRESSION [] POSTHOOK: Lineage: timestamp_2.t EXPRESSION [] 2011-01-01 01:01:01 PREHOOK: query: insert overwrite table timestamp_2 - select '2011-01-01 01:01:01.1' from src limit 1 + select '2011-01-01 01:01:01.1' from src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@timestamp_2 POSTHOOK: query: insert overwrite table timestamp_2 - select '2011-01-01 01:01:01.1' from src limit 1 + select '2011-01-01 01:01:01.1' from src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@timestamp_2 @@ -316,12 +316,12 @@ POSTHOOK: Lineage: timestamp_2.t EXPRESSION [] POSTHOOK: Lineage: timestamp_2.t EXPRESSION [] 2011-01-01 01:01:01.1 PREHOOK: query: insert overwrite table timestamp_2 - select '2011-01-01 01:01:01.0001' from src limit 1 + select '2011-01-01 01:01:01.0001' from src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@timestamp_2 POSTHOOK: query: insert overwrite table timestamp_2 - select '2011-01-01 01:01:01.0001' from src limit 1 + select '2011-01-01 01:01:01.0001' from src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@timestamp_2 @@ -434,12 +434,12 @@ POSTHOOK: Lineage: timestamp_2.t 
EXPRESSION [] POSTHOOK: Lineage: timestamp_2.t EXPRESSION [] 2011-01-01 01:01:01.0001 PREHOOK: query: insert overwrite table timestamp_2 - select '2011-01-01 01:01:01.000100000' from src limit 1 + select '2011-01-01 01:01:01.000100000' from src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@timestamp_2 POSTHOOK: query: insert overwrite table timestamp_2 - select '2011-01-01 01:01:01.000100000' from src limit 1 + select '2011-01-01 01:01:01.000100000' from src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@timestamp_2 @@ -561,12 +561,12 @@ POSTHOOK: Lineage: timestamp_2.t EXPRESSION [] POSTHOOK: Lineage: timestamp_2.t EXPRESSION [] 2011-01-01 01:01:01.0001 PREHOOK: query: insert overwrite table timestamp_2 - select '2011-01-01 01:01:01.001000011' from src limit 1 + select '2011-01-01 01:01:01.001000011' from src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@timestamp_2 POSTHOOK: query: insert overwrite table timestamp_2 - select '2011-01-01 01:01:01.001000011' from src limit 1 + select '2011-01-01 01:01:01.001000011' from src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@timestamp_2 diff --git a/ql/src/test/results/clientpositive/timestamp_3.q.out b/ql/src/test/results/clientpositive/timestamp_3.q.out index 0a042fb..8544307 100644 --- a/ql/src/test/results/clientpositive/timestamp_3.q.out +++ b/ql/src/test/results/clientpositive/timestamp_3.q.out @@ -15,13 +15,13 @@ POSTHOOK: query: alter table timestamp_3 set serde 'org.apache.hadoop.hive.serde POSTHOOK: type: ALTERTABLE_SERIALIZER POSTHOOK: Input: default@timestamp_3 POSTHOOK: Output: default@timestamp_3 -PREHOOK: query: insert overwrite table timestamp_3 - select cast(cast('1.3041352164485E9' as double) as timestamp) from src limit 1 +PREHOOK: query: insert overwrite table timestamp_3 + select cast(cast('1.3041352164485E9' as double) as timestamp) from src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@timestamp_3 -POSTHOOK: query: insert overwrite table timestamp_3 - select cast(cast('1.3041352164485E9' as double) as timestamp) from src limit 1 +POSTHOOK: query: insert overwrite table timestamp_3 + select cast(cast('1.3041352164485E9' as double) as timestamp) from src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@timestamp_3 diff --git a/ql/src/test/results/clientpositive/timestamp_comparison.q.out b/ql/src/test/results/clientpositive/timestamp_comparison.q.out index ca45f18..8456b42 100644 --- a/ql/src/test/results/clientpositive/timestamp_comparison.q.out +++ b/ql/src/test/results/clientpositive/timestamp_comparison.q.out @@ -1,9 +1,9 @@ -PREHOOK: query: select cast('2011-05-06 07:08:09' as timestamp) > +PREHOOK: query: select cast('2011-05-06 07:08:09' as timestamp) > cast('2011-05-06 07:08:09' as timestamp) from src limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: select cast('2011-05-06 07:08:09' as timestamp) > +POSTHOOK: query: select cast('2011-05-06 07:08:09' as timestamp) > cast('2011-05-06 07:08:09' as timestamp) from src limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@src diff --git a/ql/src/test/results/clientpositive/timestamp_lazy.q.out b/ql/src/test/results/clientpositive/timestamp_lazy.q.out index fff9784..62a5ab7 100644 --- 
a/ql/src/test/results/clientpositive/timestamp_lazy.q.out +++ b/ql/src/test/results/clientpositive/timestamp_lazy.q.out @@ -7,11 +7,11 @@ PREHOOK: type: CREATETABLE POSTHOOK: query: create table timestamp_lazy (t timestamp, key string, value string) POSTHOOK: type: CREATETABLE POSTHOOK: Output: default@timestamp_lazy -PREHOOK: query: insert overwrite table timestamp_lazy select cast('2011-01-01 01:01:01' as timestamp), key, value from src limit 5 +PREHOOK: query: insert overwrite table timestamp_lazy select cast('2011-01-01 01:01:01' as timestamp), key, value from src tablesample (5 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@timestamp_lazy -POSTHOOK: query: insert overwrite table timestamp_lazy select cast('2011-01-01 01:01:01' as timestamp), key, value from src limit 5 +POSTHOOK: query: insert overwrite table timestamp_lazy select cast('2011-01-01 01:01:01' as timestamp), key, value from src tablesample (5 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@timestamp_lazy diff --git a/ql/src/test/results/clientpositive/timestamp_udf.q.out b/ql/src/test/results/clientpositive/timestamp_udf.q.out index c04de7a..8d16c48 100644 --- a/ql/src/test/results/clientpositive/timestamp_udf.q.out +++ b/ql/src/test/results/clientpositive/timestamp_udf.q.out @@ -16,20 +16,20 @@ PREHOOK: type: CREATETABLE POSTHOOK: query: create table timestamp_udf_string (t string) POSTHOOK: type: CREATETABLE POSTHOOK: Output: default@timestamp_udf_string -PREHOOK: query: from src +PREHOOK: query: from (select * from src tablesample (1 rows)) s insert overwrite table timestamp_udf - select '2011-05-06 07:08:09.1234567' limit 1 + select '2011-05-06 07:08:09.1234567' insert overwrite table timestamp_udf_string - select '2011-05-06 07:08:09.1234567' limit 1 + select '2011-05-06 07:08:09.1234567' PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@timestamp_udf PREHOOK: Output: default@timestamp_udf_string -POSTHOOK: query: from src +POSTHOOK: query: from (select * from src tablesample (1 rows)) s insert overwrite table timestamp_udf - select '2011-05-06 07:08:09.1234567' limit 1 + select '2011-05-06 07:08:09.1234567' insert overwrite table timestamp_udf_string - select '2011-05-06 07:08:09.1234567' limit 1 + select '2011-05-06 07:08:09.1234567' POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@timestamp_udf diff --git a/ql/src/test/results/clientpositive/transform1.q.out b/ql/src/test/results/clientpositive/transform1.q.out index 9cd0a99..4bfcb2d 100644 --- a/ql/src/test/results/clientpositive/transform1.q.out +++ b/ql/src/test/results/clientpositive/transform1.q.out @@ -63,12 +63,12 @@ POSTHOOK: query: create table transform1_t2(col array) POSTHOOK: type: CREATETABLE POSTHOOK: Output: default@transform1_t2 PREHOOK: query: insert overwrite table transform1_t2 -select array(1,2,3) from src limit 1 +select array(1,2,3) from src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@transform1_t2 POSTHOOK: query: insert overwrite table transform1_t2 -select array(1,2,3) from src limit 1 +select array(1,2,3) from src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@transform1_t2 diff --git a/ql/src/test/results/clientpositive/truncate_column.q.out b/ql/src/test/results/clientpositive/truncate_column.q.out index 84be87c..00ee55b 100644 --- a/ql/src/test/results/clientpositive/truncate_column.q.out +++ 
b/ql/src/test/results/clientpositive/truncate_column.q.out @@ -9,11 +9,11 @@ CREATE TABLE test_tab (key STRING, value STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe' STORED AS RCFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: default@test_tab -PREHOOK: query: INSERT OVERWRITE TABLE test_tab SELECT * FROM src LIMIT 10 +PREHOOK: query: INSERT OVERWRITE TABLE test_tab SELECT * FROM src tablesample (10 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@test_tab -POSTHOOK: query: INSERT OVERWRITE TABLE test_tab SELECT * FROM src LIMIT 10 +POSTHOOK: query: INSERT OVERWRITE TABLE test_tab SELECT * FROM src tablesample (10 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@test_tab @@ -146,12 +146,12 @@ NULL val_484 NULL val_86 NULL val_98 PREHOOK: query: -- Truncate multiple columns -INSERT OVERWRITE TABLE test_tab SELECT * FROM src LIMIT 10 +INSERT OVERWRITE TABLE test_tab SELECT * FROM src tablesample (10 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@test_tab POSTHOOK: query: -- Truncate multiple columns -INSERT OVERWRITE TABLE test_tab SELECT * FROM src LIMIT 10 +INSERT OVERWRITE TABLE test_tab SELECT * FROM src tablesample (10 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@test_tab @@ -323,11 +323,11 @@ POSTHOOK: Lineage: test_tab.key SIMPLE [(src)src.FieldSchema(name:key, type:stri POSTHOOK: Lineage: test_tab.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_tab.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_tab.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] -PREHOOK: query: INSERT OVERWRITE TABLE test_tab SELECT * FROM src LIMIT 10 +PREHOOK: query: INSERT OVERWRITE TABLE test_tab SELECT * FROM src tablesample (10 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@test_tab -POSTHOOK: query: INSERT OVERWRITE TABLE test_tab SELECT * FROM src LIMIT 10 +POSTHOOK: query: INSERT OVERWRITE TABLE test_tab SELECT * FROM src tablesample (10 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@test_tab @@ -581,11 +581,11 @@ POSTHOOK: Lineage: test_tab.key SIMPLE [(src)src.FieldSchema(name:key, type:stri POSTHOOK: Lineage: test_tab.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_tab.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_tab.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] -PREHOOK: query: INSERT OVERWRITE TABLE test_tab_part PARTITION (part = '1') SELECT * FROM src LIMIT 10 +PREHOOK: query: INSERT OVERWRITE TABLE test_tab_part PARTITION (part = '1') SELECT * FROM src tablesample (10 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@test_tab_part@part=1 -POSTHOOK: query: INSERT OVERWRITE TABLE test_tab_part PARTITION (part = '1') SELECT * FROM src LIMIT 10 +POSTHOOK: query: INSERT OVERWRITE TABLE test_tab_part PARTITION (part = '1') SELECT * FROM src tablesample (10 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@test_tab_part@part=1 diff --git a/ql/src/test/results/clientpositive/truncate_column_merge.q.out b/ql/src/test/results/clientpositive/truncate_column_merge.q.out index bfb0340..a0a7e99 100644 --- 
a/ql/src/test/results/clientpositive/truncate_column_merge.q.out +++ b/ql/src/test/results/clientpositive/truncate_column_merge.q.out @@ -7,21 +7,21 @@ POSTHOOK: query: -- Tests truncating a column from a table with multiple files, CREATE TABLE test_tab (key STRING, value STRING) STORED AS RCFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: default@test_tab -PREHOOK: query: INSERT OVERWRITE TABLE test_tab SELECT * FROM src LIMIT 5 +PREHOOK: query: INSERT OVERWRITE TABLE test_tab SELECT * FROM src tablesample (5 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@test_tab -POSTHOOK: query: INSERT OVERWRITE TABLE test_tab SELECT * FROM src LIMIT 5 +POSTHOOK: query: INSERT OVERWRITE TABLE test_tab SELECT * FROM src tablesample (5 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@test_tab POSTHOOK: Lineage: test_tab.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_tab.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] -PREHOOK: query: INSERT INTO TABLE test_tab SELECT * FROM src LIMIT 5 +PREHOOK: query: INSERT INTO TABLE test_tab SELECT * FROM src tablesample (5 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@test_tab -POSTHOOK: query: INSERT INTO TABLE test_tab SELECT * FROM src LIMIT 5 +POSTHOOK: query: INSERT INTO TABLE test_tab SELECT * FROM src tablesample (5 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@test_tab diff --git a/ql/src/test/results/clientpositive/type_cast_1.q.out b/ql/src/test/results/clientpositive/type_cast_1.q.out index ab3d4c3..3ab8f2f 100644 --- a/ql/src/test/results/clientpositive/type_cast_1.q.out +++ b/ql/src/test/results/clientpositive/type_cast_1.q.out @@ -8,33 +8,22 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (+ (TOK_FUNCTION IF false 1 (TOK_FUNCTION TOK_SMALLINT 2)) 3))) (TOK_LIMIT 1))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - src - TableScan - alias: src - Select Operator - expressions: - expr: (if(false, 1, UDFToShort(2)) + 3) - type: int - outputColumnNames: _col0 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: 1 + Processor Tree: + TableScan + alias: src + Select Operator + expressions: + expr: (if(false, 1, UDFToShort(2)) + 3) + type: int + outputColumnNames: _col0 + Limit + ListSink PREHOOK: query: SELECT IF(false, 1, cast(2 as smallint)) + 3 FROM src LIMIT 1 diff --git a/ql/src/test/results/clientpositive/type_widening.q.out b/ql/src/test/results/clientpositive/type_widening.q.out index 97806d0..f8b96e8 100644 --- a/ql/src/test/results/clientpositive/type_widening.q.out +++ b/ql/src/test/results/clientpositive/type_widening.q.out @@ -8,33 +8,22 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION COALESCE 0 9223372036854775807))) (TOK_LIMIT 1))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias 
-> Map Operator Tree: - src - TableScan - alias: src - Select Operator - expressions: - expr: COALESCE(0,9223372036854775807) - type: bigint - outputColumnNames: _col0 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: 1 + Processor Tree: + TableScan + alias: src + Select Operator + expressions: + expr: COALESCE(0,9223372036854775807) + type: bigint + outputColumnNames: _col0 + Limit + ListSink PREHOOK: query: SELECT COALESCE(0, 9223372036854775807) FROM src LIMIT 1 diff --git a/ql/src/test/results/clientpositive/udf_E.q.out b/ql/src/test/results/clientpositive/udf_E.q.out index 795934e..ae94b13 100644 --- a/ql/src/test/results/clientpositive/udf_E.q.out +++ b/ql/src/test/results/clientpositive/udf_E.q.out @@ -1,47 +1,36 @@ -PREHOOK: query: explain -select E() FROM src LIMIT 1 +PREHOOK: query: explain +select E() FROM src tablesample (1 rows) PREHOOK: type: QUERY -POSTHOOK: query: explain -select E() FROM src LIMIT 1 +POSTHOOK: query: explain +select E() FROM src tablesample (1 rows) POSTHOOK: type: QUERY ABSTRACT SYNTAX TREE: - (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION E))) (TOK_LIMIT 1))) + (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src) (TOK_TABLESPLITSAMPLE TOK_ROWCOUNT 1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION E))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - src - TableScan - alias: src - Select Operator - expressions: - expr: e() - type: double - outputColumnNames: _col0 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator - limit: 1 + limit: -1 + Processor Tree: + TableScan + alias: src + Row Limit Per Split: 1 + Select Operator + expressions: + expr: e() + type: double + outputColumnNames: _col0 + ListSink -PREHOOK: query: select E() FROM src LIMIT 1 +PREHOOK: query: select E() FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: select E() FROM src LIMIT 1 +POSTHOOK: query: select E() FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### @@ -61,49 +50,38 @@ Example: > SELECT E() FROM src LIMIT 1; 2.718281828459045 PREHOOK: query: explain -select E() FROM src LIMIT 1 +select E() FROM src tablesample (1 rows) PREHOOK: type: QUERY POSTHOOK: query: explain -select E() FROM src LIMIT 1 +select E() FROM src tablesample (1 rows) POSTHOOK: type: QUERY ABSTRACT SYNTAX TREE: - (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION E))) (TOK_LIMIT 1))) + (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src) (TOK_TABLESPLITSAMPLE TOK_ROWCOUNT 1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION E))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root 
stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - src - TableScan - alias: src - Select Operator - expressions: - expr: e() - type: double - outputColumnNames: _col0 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator - limit: 1 + limit: -1 + Processor Tree: + TableScan + alias: src + Row Limit Per Split: 1 + Select Operator + expressions: + expr: e() + type: double + outputColumnNames: _col0 + ListSink -PREHOOK: query: select E() FROM src LIMIT 1 +PREHOOK: query: select E() FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: select E() FROM src LIMIT 1 +POSTHOOK: query: select E() FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_PI.q.out b/ql/src/test/results/clientpositive/udf_PI.q.out index 78f8d0f..f579730 100644 --- a/ql/src/test/results/clientpositive/udf_PI.q.out +++ b/ql/src/test/results/clientpositive/udf_PI.q.out @@ -1,47 +1,36 @@ -PREHOOK: query: explain -select PI() FROM src LIMIT 1 +PREHOOK: query: explain +select PI() FROM src tablesample (1 rows) PREHOOK: type: QUERY -POSTHOOK: query: explain -select PI() FROM src LIMIT 1 +POSTHOOK: query: explain +select PI() FROM src tablesample (1 rows) POSTHOOK: type: QUERY ABSTRACT SYNTAX TREE: - (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION PI))) (TOK_LIMIT 1))) + (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src) (TOK_TABLESPLITSAMPLE TOK_ROWCOUNT 1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION PI))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - src - TableScan - alias: src - Select Operator - expressions: - expr: pi() - type: double - outputColumnNames: _col0 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator - limit: 1 + limit: -1 + Processor Tree: + TableScan + alias: src + Row Limit Per Split: 1 + Select Operator + expressions: + expr: pi() + type: double + outputColumnNames: _col0 + ListSink -PREHOOK: query: select PI() FROM src LIMIT 1 +PREHOOK: query: select PI() FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: select PI() FROM src LIMIT 1 +POSTHOOK: query: select PI() FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### @@ -61,49 +50,38 @@ Example: > SELECT PI() FROM src LIMIT 1; 3.14159... 
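The hunks above and below show the pattern that recurs through these golden files: with "select PI() FROM src LIMIT 1" the plan needed a Map Reduce stage (Stage-1) writing to a Fetch Operator with "limit: 1", while the rewritten "tablesample (1 rows)" form is pushed into the TableScan as "Row Limit Per Split: 1", leaving a single fetch-only stage whose Processor Tree ends in ListSink. A minimal sketch of the rewritten query shape, assuming the standard src test table is loaded (the column list here is illustrative, not taken from any one test):

    -- constant-only projection over a one-row sample of src;
    -- Hive can serve this from a fetch-only plan, with no Map Reduce stage
    SELECT PI(), E()
    FROM src TABLESAMPLE (1 ROWS);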
PREHOOK: query: explain -select PI() FROM src LIMIT 1 +select PI() FROM src tablesample (1 rows) PREHOOK: type: QUERY POSTHOOK: query: explain -select PI() FROM src LIMIT 1 +select PI() FROM src tablesample (1 rows) POSTHOOK: type: QUERY ABSTRACT SYNTAX TREE: - (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION PI))) (TOK_LIMIT 1))) + (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src) (TOK_TABLESPLITSAMPLE TOK_ROWCOUNT 1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION PI))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - src - TableScan - alias: src - Select Operator - expressions: - expr: pi() - type: double - outputColumnNames: _col0 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator - limit: 1 + limit: -1 + Processor Tree: + TableScan + alias: src + Row Limit Per Split: 1 + Select Operator + expressions: + expr: pi() + type: double + outputColumnNames: _col0 + ListSink -PREHOOK: query: select PI() FROM src LIMIT 1 +PREHOOK: query: select PI() FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: select PI() FROM src LIMIT 1 +POSTHOOK: query: select PI() FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_abs.q.out b/ql/src/test/results/clientpositive/udf_abs.q.out index 95d397c..38df6f7 100644 --- a/ql/src/test/results/clientpositive/udf_abs.q.out +++ b/ql/src/test/results/clientpositive/udf_abs.q.out @@ -19,7 +19,7 @@ PREHOOK: query: EXPLAIN SELECT abs(123), abs(-9223372036854775807), abs(9223372036854775807) -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY POSTHOOK: query: EXPLAIN SELECT abs(0), @@ -27,47 +27,36 @@ POSTHOOK: query: EXPLAIN SELECT abs(123), abs(-9223372036854775807), abs(9223372036854775807) -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY ABSTRACT SYNTAX TREE: - (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION abs 0)) (TOK_SELEXPR (TOK_FUNCTION abs (- 1))) (TOK_SELEXPR (TOK_FUNCTION abs 123)) (TOK_SELEXPR (TOK_FUNCTION abs (- 9223372036854775807))) (TOK_SELEXPR (TOK_FUNCTION abs 9223372036854775807))) (TOK_LIMIT 1))) + (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src) (TOK_TABLESPLITSAMPLE TOK_ROWCOUNT 1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION abs 0)) (TOK_SELEXPR (TOK_FUNCTION abs (- 1))) (TOK_SELEXPR (TOK_FUNCTION abs 123)) (TOK_SELEXPR (TOK_FUNCTION abs (- 9223372036854775807))) (TOK_SELEXPR (TOK_FUNCTION abs 9223372036854775807))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - src - TableScan - alias: src - Select Operator - expressions: - expr: abs(0) - type: int - expr: abs((- 1)) - type: int - expr: abs(123) - type: int - expr: abs((- 9223372036854775807)) - type: bigint - expr: 
abs(9223372036854775807) - type: bigint - outputColumnNames: _col0, _col1, _col2, _col3, _col4 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator - limit: 1 + limit: -1 + Processor Tree: + TableScan + alias: src + Row Limit Per Split: 1 + Select Operator + expressions: + expr: abs(0) + type: int + expr: abs((- 1)) + type: int + expr: abs(123) + type: int + expr: abs((- 9223372036854775807)) + type: bigint + expr: abs(9223372036854775807) + type: bigint + outputColumnNames: _col0, _col1, _col2, _col3, _col4 + ListSink PREHOOK: query: SELECT @@ -76,7 +65,7 @@ PREHOOK: query: SELECT abs(123), abs(-9223372036854775807), abs(9223372036854775807) -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### @@ -86,7 +75,7 @@ POSTHOOK: query: SELECT abs(123), abs(-9223372036854775807), abs(9223372036854775807) -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### @@ -95,56 +84,45 @@ PREHOOK: query: EXPLAIN SELECT abs(0.0), abs(-3.14159265), abs(3.14159265) -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY POSTHOOK: query: EXPLAIN SELECT abs(0.0), abs(-3.14159265), abs(3.14159265) -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY ABSTRACT SYNTAX TREE: - (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION abs 0.0)) (TOK_SELEXPR (TOK_FUNCTION abs (- 3.14159265))) (TOK_SELEXPR (TOK_FUNCTION abs 3.14159265))) (TOK_LIMIT 1))) + (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src) (TOK_TABLESPLITSAMPLE TOK_ROWCOUNT 1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION abs 0.0)) (TOK_SELEXPR (TOK_FUNCTION abs (- 3.14159265))) (TOK_SELEXPR (TOK_FUNCTION abs 3.14159265))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - src - TableScan - alias: src - Select Operator - expressions: - expr: abs(0.0) - type: double - expr: abs((- 3.14159265)) - type: double - expr: abs(3.14159265) - type: double - outputColumnNames: _col0, _col1, _col2 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator - limit: 1 + limit: -1 + Processor Tree: + TableScan + alias: src + Row Limit Per Split: 1 + Select Operator + expressions: + expr: abs(0.0) + type: double + expr: abs((- 3.14159265)) + type: double + expr: abs(3.14159265) + type: double + outputColumnNames: _col0, _col1, _col2 + ListSink PREHOOK: query: SELECT abs(0.0), abs(-3.14159265), abs(3.14159265) -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### @@ -152,7 +130,7 @@ POSTHOOK: query: SELECT abs(0.0), abs(-3.14159265), abs(3.14159265) -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff 
--git a/ql/src/test/results/clientpositive/udf_acos.q.out b/ql/src/test/results/clientpositive/udf_acos.q.out index 187e3d4..311291a 100644 --- a/ql/src/test/results/clientpositive/udf_acos.q.out +++ b/ql/src/test/results/clientpositive/udf_acos.q.out @@ -14,45 +14,45 @@ Example: > SELECT acos(2) FROM src LIMIT 1; NULL PREHOOK: query: SELECT acos(null) -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### POSTHOOK: query: SELECT acos(null) -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### NULL PREHOOK: query: SELECT acos(0) -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### POSTHOOK: query: SELECT acos(0) -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### 1.5707963267948966 PREHOOK: query: SELECT acos(-0.5), asin(0.66) -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### POSTHOOK: query: SELECT acos(-0.5), asin(0.66) -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### 2.0943951023931957 0.7208187608700897 PREHOOK: query: SELECT acos(2) -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### POSTHOOK: query: SELECT acos(2) -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_array.q.out b/ql/src/test/results/clientpositive/udf_array.q.out index e44432f..47b2ee8 100644 --- a/ql/src/test/results/clientpositive/udf_array.q.out +++ b/ql/src/test/results/clientpositive/udf_array.q.out @@ -9,63 +9,52 @@ POSTHOOK: query: DESCRIBE FUNCTION EXTENDED array POSTHOOK: type: DESCFUNCTION array(n0, n1...) 
- Creates an array with the given elements PREHOOK: query: EXPLAIN SELECT array(), array()[1], array(1, 2, 3), array(1, 2, 3)[2], array(1,"a", 2, 3), array(1,"a", 2, 3)[2], -array(array(1), array(2), array(3), array(4))[1][0] FROM src LIMIT 1 +array(array(1), array(2), array(3), array(4))[1][0] FROM src tablesample (1 rows) PREHOOK: type: QUERY POSTHOOK: query: EXPLAIN SELECT array(), array()[1], array(1, 2, 3), array(1, 2, 3)[2], array(1,"a", 2, 3), array(1,"a", 2, 3)[2], -array(array(1), array(2), array(3), array(4))[1][0] FROM src LIMIT 1 +array(array(1), array(2), array(3), array(4))[1][0] FROM src tablesample (1 rows) POSTHOOK: type: QUERY ABSTRACT SYNTAX TREE: - (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION array)) (TOK_SELEXPR ([ (TOK_FUNCTION array) 1)) (TOK_SELEXPR (TOK_FUNCTION array 1 2 3)) (TOK_SELEXPR ([ (TOK_FUNCTION array 1 2 3) 2)) (TOK_SELEXPR (TOK_FUNCTION array 1 "a" 2 3)) (TOK_SELEXPR ([ (TOK_FUNCTION array 1 "a" 2 3) 2)) (TOK_SELEXPR ([ ([ (TOK_FUNCTION array (TOK_FUNCTION array 1) (TOK_FUNCTION array 2) (TOK_FUNCTION array 3) (TOK_FUNCTION array 4)) 1) 0))) (TOK_LIMIT 1))) + (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src) (TOK_TABLESPLITSAMPLE TOK_ROWCOUNT 1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION array)) (TOK_SELEXPR ([ (TOK_FUNCTION array) 1)) (TOK_SELEXPR (TOK_FUNCTION array 1 2 3)) (TOK_SELEXPR ([ (TOK_FUNCTION array 1 2 3) 2)) (TOK_SELEXPR (TOK_FUNCTION array 1 "a" 2 3)) (TOK_SELEXPR ([ (TOK_FUNCTION array 1 "a" 2 3) 2)) (TOK_SELEXPR ([ ([ (TOK_FUNCTION array (TOK_FUNCTION array 1) (TOK_FUNCTION array 2) (TOK_FUNCTION array 3) (TOK_FUNCTION array 4)) 1) 0))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - src - TableScan - alias: src - Select Operator - expressions: - expr: array() - type: array - expr: array()[1] - type: string - expr: array(1,2,3) - type: array - expr: array(1,2,3)[2] - type: int - expr: array(1,'a',2,3) - type: array - expr: array(1,'a',2,3)[2] - type: string - expr: array(array(1),array(2),array(3),array(4))[1][0] - type: int - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator - limit: 1 + limit: -1 + Processor Tree: + TableScan + alias: src + Row Limit Per Split: 1 + Select Operator + expressions: + expr: array() + type: array + expr: array()[1] + type: string + expr: array(1,2,3) + type: array + expr: array(1,2,3)[2] + type: int + expr: array(1,'a',2,3) + type: array + expr: array(1,'a',2,3)[2] + type: string + expr: array(array(1),array(2),array(3),array(4))[1][0] + type: int + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6 + ListSink PREHOOK: query: SELECT array(), array()[1], array(1, 2, 3), array(1, 2, 3)[2], array(1,"a", 2, 3), array(1,"a", 2, 3)[2], -array(array(1), array(2), array(3), array(4))[1][0] FROM src LIMIT 1 +array(array(1), array(2), array(3), array(4))[1][0] FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### POSTHOOK: query: SELECT array(), array()[1], array(1, 2, 3), 
array(1, 2, 3)[2], array(1,"a", 2, 3), array(1,"a", 2, 3)[2], -array(array(1), array(2), array(3), array(4))[1][0] FROM src LIMIT 1 +array(array(1), array(2), array(3), array(4))[1][0] FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_array_contains.q.out b/ql/src/test/results/clientpositive/udf_array_contains.q.out index 5ff0b17..63aafca 100644 --- a/ql/src/test/results/clientpositive/udf_array_contains.q.out +++ b/ql/src/test/results/clientpositive/udf_array_contains.q.out @@ -12,25 +12,25 @@ Example: > SELECT array_contains(array(1, 2, 3), 2) FROM src LIMIT 1; true PREHOOK: query: -- evalutes function for array of primitives -SELECT array_contains(array(1, 2, 3), 1) FROM src LIMIT 1 +SELECT array_contains(array(1, 2, 3), 1) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### POSTHOOK: query: -- evalutes function for array of primitives -SELECT array_contains(array(1, 2, 3), 1) FROM src LIMIT 1 +SELECT array_contains(array(1, 2, 3), 1) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### true PREHOOK: query: -- evaluates function for nested arrays SELECT array_contains(array(array(1,2), array(2,3), array(3,4)), array(1,2)) -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### POSTHOOK: query: -- evaluates function for nested arrays SELECT array_contains(array(array(1,2), array(2,3), array(3,4)), array(1,2)) -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_ascii.q.out b/ql/src/test/results/clientpositive/udf_ascii.q.out index e3724f5..cc51f80 100644 --- a/ql/src/test/results/clientpositive/udf_ascii.q.out +++ b/ql/src/test/results/clientpositive/udf_ascii.q.out @@ -17,56 +17,45 @@ PREHOOK: query: EXPLAIN SELECT ascii('Facebook'), ascii(''), ascii('!') -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY POSTHOOK: query: EXPLAIN SELECT ascii('Facebook'), ascii(''), ascii('!') -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY ABSTRACT SYNTAX TREE: - (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION ascii 'Facebook')) (TOK_SELEXPR (TOK_FUNCTION ascii '')) (TOK_SELEXPR (TOK_FUNCTION ascii '!'))) (TOK_LIMIT 1))) + (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src) (TOK_TABLESPLITSAMPLE TOK_ROWCOUNT 1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION ascii 'Facebook')) (TOK_SELEXPR (TOK_FUNCTION ascii '')) (TOK_SELEXPR (TOK_FUNCTION ascii '!'))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - src - TableScan - alias: src - Select Operator - expressions: - expr: ascii('Facebook') - type: int - expr: ascii('') - type: int - expr: ascii('!') - type: int - outputColumnNames: _col0, _col1, _col2 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: 
Stage-0 Fetch Operator - limit: 1 + limit: -1 + Processor Tree: + TableScan + alias: src + Row Limit Per Split: 1 + Select Operator + expressions: + expr: ascii('Facebook') + type: int + expr: ascii('') + type: int + expr: ascii('!') + type: int + outputColumnNames: _col0, _col1, _col2 + ListSink PREHOOK: query: SELECT ascii('Facebook'), ascii(''), ascii('!') -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### @@ -74,7 +63,7 @@ POSTHOOK: query: SELECT ascii('Facebook'), ascii(''), ascii('!') -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_asin.q.out b/ql/src/test/results/clientpositive/udf_asin.q.out index e90cab8..39659af 100644 --- a/ql/src/test/results/clientpositive/udf_asin.q.out +++ b/ql/src/test/results/clientpositive/udf_asin.q.out @@ -14,45 +14,45 @@ Example: > SELECT asin(2) FROM src LIMIT 1; NULL PREHOOK: query: SELECT asin(null) -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### POSTHOOK: query: SELECT asin(null) -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### NULL PREHOOK: query: SELECT asin(0) -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### POSTHOOK: query: SELECT asin(0) -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### 0.0 PREHOOK: query: SELECT asin(-0.5), asin(0.66) -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### POSTHOOK: query: SELECT asin(-0.5), asin(0.66) -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### -0.5235987755982989 0.7208187608700897 PREHOOK: query: SELECT asin(2) -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### POSTHOOK: query: SELECT asin(2) -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_atan.q.out b/ql/src/test/results/clientpositive/udf_atan.q.out index 448af50..cc1bf35 100644 --- a/ql/src/test/results/clientpositive/udf_atan.q.out +++ b/ql/src/test/results/clientpositive/udf_atan.q.out @@ -12,23 +12,23 @@ Example: > SELECT atan(0) FROM src LIMIT 1; 0 PREHOOK: query: SELECT atan(null) -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### POSTHOOK: query: SELECT atan(null) -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### NULL PREHOOK: query: SELECT atan(1), atan(6), atan(-1.0) -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### POSTHOOK: query: SELECT atan(1), atan(6), atan(-1.0) -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### @@ -47,23 +47,23 @@ Example: > SELECT atan(0) FROM src 
LIMIT 1; 0 PREHOOK: query: SELECT atan(null) -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### POSTHOOK: query: SELECT atan(null) -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### NULL PREHOOK: query: SELECT atan(1), atan(6), atan(-1.0) -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### POSTHOOK: query: SELECT atan(1), atan(6), atan(-1.0) -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_between.q.out b/ql/src/test/results/clientpositive/udf_between.q.out index 70d479b..e8561f7 100644 --- a/ql/src/test/results/clientpositive/udf_between.q.out +++ b/ql/src/test/results/clientpositive/udf_between.q.out @@ -16,39 +16,28 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (TOK_FUNCTION between KW_FALSE (+ (TOK_TABLE_OR_COL key) 100) (+ 150 (- 50)) (+ 150 50))) (TOK_LIMIT 20))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - src - TableScan - alias: src - Filter Operator - predicate: - expr: (key + 100) BETWEEN (150 + (- 50)) AND (150 + 50) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: 20 + Processor Tree: + TableScan + alias: src + Filter Operator + predicate: + expr: (key + 100) BETWEEN (150 + (- 50)) AND (150 + 50) + type: boolean + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Limit + ListSink PREHOOK: query: SELECT * FROM src where key + 100 between (150 + -50) AND (150 + 50) LIMIT 20 @@ -87,39 +76,28 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (TOK_FUNCTION between KW_TRUE (+ (TOK_TABLE_OR_COL key) 100) (+ 150 (- 50)) (+ 150 50))) (TOK_LIMIT 20))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - src - TableScan - alias: src - Filter Operator - predicate: - expr: (key + 100) NOT BETWEEN (150 + (- 50)) AND (150 + 50) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: 20 + Processor Tree: + TableScan + alias: src + Filter Operator + predicate: + expr: (key + 100) NOT BETWEEN (150 + (- 50)) 
AND (150 + 50) + type: boolean + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Limit + ListSink PREHOOK: query: SELECT * FROM src where key + 100 not between (150 + -50) AND (150 + 50) LIMIT 20 @@ -158,39 +136,28 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (TOK_FUNCTION between KW_FALSE 'b' 'a' 'c')) (TOK_LIMIT 1))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - src - TableScan - alias: src - Filter Operator - predicate: - expr: 'b' BETWEEN 'a' AND 'c' - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: 1 + Processor Tree: + TableScan + alias: src + Filter Operator + predicate: + expr: 'b' BETWEEN 'a' AND 'c' + type: boolean + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Limit + ListSink PREHOOK: query: SELECT * FROM src where 'b' between 'a' AND 'c' LIMIT 1 @@ -210,39 +177,28 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (TOK_FUNCTION between KW_FALSE 2 2 '3')) (TOK_LIMIT 1))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - src - TableScan - alias: src - Filter Operator - predicate: - expr: 2 BETWEEN 2 AND '3' - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: 1 + Processor Tree: + TableScan + alias: src + Filter Operator + predicate: + expr: 2 BETWEEN 2 AND '3' + type: boolean + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Limit + ListSink PREHOOK: query: SELECT * FROM src where 2 between 2 AND '3' LIMIT 1 diff --git a/ql/src/test/results/clientpositive/udf_bin.q.out b/ql/src/test/results/clientpositive/udf_bin.q.out index 0e4a8d0..e83df28 100644 --- a/ql/src/test/results/clientpositive/udf_bin.q.out +++ b/ql/src/test/results/clientpositive/udf_bin.q.out @@ -16,7 +16,7 @@ PREHOOK: query: SELECT bin(1), bin(0), bin(99992421) -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### @@ -24,18 +24,18 @@ POSTHOOK: query: SELECT bin(1), bin(0), bin(99992421) -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### 1 0 101111101011100001101100101 PREHOOK: query: -- 
Negative numbers should be treated as two's complement (64 bit). -SELECT bin(-5) FROM src LIMIT 1 +SELECT bin(-5) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### POSTHOOK: query: -- Negative numbers should be treated as two's complement (64 bit). -SELECT bin(-5) FROM src LIMIT 1 +SELECT bin(-5) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_bitmap_and.q.out b/ql/src/test/results/clientpositive/udf_bitmap_and.q.out index 37b9f01..01f4840 100644 --- a/ql/src/test/results/clientpositive/udf_bitmap_and.q.out +++ b/ql/src/test/results/clientpositive/udf_bitmap_and.q.out @@ -1,17 +1,17 @@ -PREHOOK: query: select ewah_bitmap_and(array(13,2,4,8589934592,4096,0), array(13,2,4,8589934592,4096,0)) from src limit 1 +PREHOOK: query: select ewah_bitmap_and(array(13,2,4,8589934592,4096,0), array(13,2,4,8589934592,4096,0)) from src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: select ewah_bitmap_and(array(13,2,4,8589934592,4096,0), array(13,2,4,8589934592,4096,0)) from src limit 1 +POSTHOOK: query: select ewah_bitmap_and(array(13,2,4,8589934592,4096,0), array(13,2,4,8589934592,4096,0)) from src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### [13,2,4,8589934592,4096,0] -PREHOOK: query: select ewah_bitmap_and(array(13,2,4,8589934592,4096,0), array(8,2,4,8589934592,128,0)) from src limit 1 +PREHOOK: query: select ewah_bitmap_and(array(13,2,4,8589934592,4096,0), array(8,2,4,8589934592,128,0)) from src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: select ewah_bitmap_and(array(13,2,4,8589934592,4096,0), array(8,2,4,8589934592,128,0)) from src limit 1 +POSTHOOK: query: select ewah_bitmap_and(array(13,2,4,8589934592,4096,0), array(8,2,4,8589934592,128,0)) from src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### @@ -26,12 +26,12 @@ POSTHOOK: query: create table bitmap_test (a array, b array) POSTHOOK: type: CREATETABLE POSTHOOK: Output: default@bitmap_test PREHOOK: query: insert overwrite table bitmap_test -select array(13,2,4,8589934592,4096,0), array(8,2,4,8589934592,128,0) from src limit 10 +select array(13,2,4,8589934592,4096,0), array(8,2,4,8589934592,128,0) from src tablesample (10 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@bitmap_test POSTHOOK: query: insert overwrite table bitmap_test -select array(13,2,4,8589934592,4096,0), array(8,2,4,8589934592,128,0) from src limit 10 +select array(13,2,4,8589934592,4096,0), array(8,2,4,8589934592,128,0) from src tablesample (10 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@bitmap_test diff --git a/ql/src/test/results/clientpositive/udf_bitmap_empty.q.out b/ql/src/test/results/clientpositive/udf_bitmap_empty.q.out index ad82a6e..ca96e78 100644 --- a/ql/src/test/results/clientpositive/udf_bitmap_empty.q.out +++ b/ql/src/test/results/clientpositive/udf_bitmap_empty.q.out @@ -1,17 +1,17 @@ -PREHOOK: query: select ewah_bitmap_empty(array(13,2,4,8589934592,0,0)) from src limit 1 +PREHOOK: query: select ewah_bitmap_empty(array(13,2,4,8589934592,0,0)) from src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked 
pattern was here #### -POSTHOOK: query: select ewah_bitmap_empty(array(13,2,4,8589934592,0,0)) from src limit 1 +POSTHOOK: query: select ewah_bitmap_empty(array(13,2,4,8589934592,0,0)) from src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### true -PREHOOK: query: select ewah_bitmap_empty(array(13,2,4,8589934592,4096,0)) from src limit 1 +PREHOOK: query: select ewah_bitmap_empty(array(13,2,4,8589934592,4096,0)) from src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: select ewah_bitmap_empty(array(13,2,4,8589934592,4096,0)) from src limit 1 +POSTHOOK: query: select ewah_bitmap_empty(array(13,2,4,8589934592,4096,0)) from src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_bitmap_or.q.out b/ql/src/test/results/clientpositive/udf_bitmap_or.q.out index b01723f..9f06f6b 100644 --- a/ql/src/test/results/clientpositive/udf_bitmap_or.q.out +++ b/ql/src/test/results/clientpositive/udf_bitmap_or.q.out @@ -1,17 +1,17 @@ -PREHOOK: query: select ewah_bitmap_or(array(13,2,4,8589934592,4096,0), array(13,2,4,8589934592,4096,0)) from src limit 1 +PREHOOK: query: select ewah_bitmap_or(array(13,2,4,8589934592,4096,0), array(13,2,4,8589934592,4096,0)) from src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: select ewah_bitmap_or(array(13,2,4,8589934592,4096,0), array(13,2,4,8589934592,4096,0)) from src limit 1 +POSTHOOK: query: select ewah_bitmap_or(array(13,2,4,8589934592,4096,0), array(13,2,4,8589934592,4096,0)) from src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### [13,2,4,8589934592,4096,0] -PREHOOK: query: select ewah_bitmap_or(array(13,2,4,8589934592,4096,0), array(8,2,4,8589934592,128,0)) from src limit 1 +PREHOOK: query: select ewah_bitmap_or(array(13,2,4,8589934592,4096,0), array(8,2,4,8589934592,128,0)) from src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: select ewah_bitmap_or(array(13,2,4,8589934592,4096,0), array(8,2,4,8589934592,128,0)) from src limit 1 +POSTHOOK: query: select ewah_bitmap_or(array(13,2,4,8589934592,4096,0), array(8,2,4,8589934592,128,0)) from src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### @@ -26,12 +26,12 @@ POSTHOOK: query: create table bitmap_test (a array, b array) POSTHOOK: type: CREATETABLE POSTHOOK: Output: default@bitmap_test PREHOOK: query: insert overwrite table bitmap_test -select array(13,2,4,8589934592,4096,0), array(8,2,4,8589934592,128,0) from src limit 10 +select array(13,2,4,8589934592,4096,0), array(8,2,4,8589934592,128,0) from src tablesample (10 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@bitmap_test POSTHOOK: query: insert overwrite table bitmap_test -select array(13,2,4,8589934592,4096,0), array(8,2,4,8589934592,128,0) from src limit 10 +select array(13,2,4,8589934592,4096,0), array(8,2,4,8589934592,128,0) from src tablesample (10 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@bitmap_test diff --git a/ql/src/test/results/clientpositive/udf_case.q.out b/ql/src/test/results/clientpositive/udf_case.q.out index 0b8c542..4a59a8c 100644 --- 
a/ql/src/test/results/clientpositive/udf_case.q.out +++ b/ql/src/test/results/clientpositive/udf_case.q.out @@ -34,7 +34,7 @@ SELECT CASE 1 WHEN 22 THEN 23 WHEN 21 THEN 24 END -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY POSTHOOK: query: EXPLAIN SELECT CASE 1 @@ -62,49 +62,38 @@ SELECT CASE 1 WHEN 22 THEN 23 WHEN 21 THEN 24 END -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY ABSTRACT SYNTAX TREE: - (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION CASE 1 1 2 3 4 5)) (TOK_SELEXPR (TOK_FUNCTION CASE 2 1 2 5)) (TOK_SELEXPR (TOK_FUNCTION CASE 14 12 13 14 15)) (TOK_SELEXPR (TOK_FUNCTION CASE 16 12 13 14 15)) (TOK_SELEXPR (TOK_FUNCTION CASE 17 18 TOK_NULL 17 20)) (TOK_SELEXPR (TOK_FUNCTION CASE 21 22 23 21 24))) (TOK_LIMIT 1))) + (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src) (TOK_TABLESPLITSAMPLE TOK_ROWCOUNT 1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION CASE 1 1 2 3 4 5)) (TOK_SELEXPR (TOK_FUNCTION CASE 2 1 2 5)) (TOK_SELEXPR (TOK_FUNCTION CASE 14 12 13 14 15)) (TOK_SELEXPR (TOK_FUNCTION CASE 16 12 13 14 15)) (TOK_SELEXPR (TOK_FUNCTION CASE 17 18 TOK_NULL 17 20)) (TOK_SELEXPR (TOK_FUNCTION CASE 21 22 23 21 24))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - src - TableScan - alias: src - Select Operator - expressions: - expr: CASE (1) WHEN (1) THEN (2) WHEN (3) THEN (4) ELSE (5) END - type: int - expr: CASE (2) WHEN (1) THEN (2) ELSE (5) END - type: int - expr: CASE (14) WHEN (12) THEN (13) WHEN (14) THEN (15) END - type: int - expr: CASE (16) WHEN (12) THEN (13) WHEN (14) THEN (15) END - type: int - expr: CASE (17) WHEN (18) THEN (null) WHEN (17) THEN (20) END - type: int - expr: CASE (21) WHEN (22) THEN (23) WHEN (21) THEN (24) END - type: int - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator - limit: 1 + limit: -1 + Processor Tree: + TableScan + alias: src + Row Limit Per Split: 1 + Select Operator + expressions: + expr: CASE (1) WHEN (1) THEN (2) WHEN (3) THEN (4) ELSE (5) END + type: int + expr: CASE (2) WHEN (1) THEN (2) ELSE (5) END + type: int + expr: CASE (14) WHEN (12) THEN (13) WHEN (14) THEN (15) END + type: int + expr: CASE (16) WHEN (12) THEN (13) WHEN (14) THEN (15) END + type: int + expr: CASE (17) WHEN (18) THEN (null) WHEN (17) THEN (20) END + type: int + expr: CASE (21) WHEN (22) THEN (23) WHEN (21) THEN (24) END + type: int + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + ListSink PREHOOK: query: SELECT CASE 1 @@ -132,7 +121,7 @@ PREHOOK: query: SELECT CASE 1 WHEN 22 THEN 23 WHEN 21 THEN 24 END -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### @@ -161,7 +150,7 @@ POSTHOOK: query: SELECT CASE 1 WHEN 22 THEN 23 WHEN 21 THEN 24 END -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### @@ -170,7 +159,7 @@ PREHOOK: query: -- verify that short-circuiting is working correctly for CASE -- we should 
never get to the ELSE branch, which would raise an exception SELECT CASE 1 WHEN 1 THEN 'yo' ELSE reflect('java.lang.String', 'bogus', 1) END -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### @@ -178,7 +167,7 @@ POSTHOOK: query: -- verify that short-circuiting is working correctly for CASE -- we should never get to the ELSE branch, which would raise an exception SELECT CASE 1 WHEN 1 THEN 'yo' ELSE reflect('java.lang.String', 'bogus', 1) END -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_case_thrift.q.out b/ql/src/test/results/clientpositive/udf_case_thrift.q.out index 7482e82..137dc34 100644 --- a/ql/src/test/results/clientpositive/udf_case_thrift.q.out +++ b/ql/src/test/results/clientpositive/udf_case_thrift.q.out @@ -14,7 +14,7 @@ SELECT CASE src_thrift.lint[0] WHEN '0' THEN src_thrift.lstring ELSE NULL END)[0] -FROM src_thrift LIMIT 3 +FROM src_thrift tablesample (3 rows) PREHOOK: type: QUERY POSTHOOK: query: EXPLAIN SELECT CASE src_thrift.lint[0] @@ -32,43 +32,32 @@ SELECT CASE src_thrift.lint[0] WHEN '0' THEN src_thrift.lstring ELSE NULL END)[0] -FROM src_thrift LIMIT 3 +FROM src_thrift tablesample (3 rows) POSTHOOK: type: QUERY ABSTRACT SYNTAX TREE: - (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src_thrift))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION CASE ([ (. (TOK_TABLE_OR_COL src_thrift) lint) 0) 0 (+ ([ (. (TOK_TABLE_OR_COL src_thrift) lint) 0) 1) 1 (+ ([ (. (TOK_TABLE_OR_COL src_thrift) lint) 0) 2) 2 100 5)) (TOK_SELEXPR (TOK_FUNCTION CASE ([ (. (TOK_TABLE_OR_COL src_thrift) lstring) 0) '0' 'zero' '10' (TOK_FUNCTION CONCAT ([ (. (TOK_TABLE_OR_COL src_thrift) lstring) 0) " is ten") 'default')) (TOK_SELEXPR ([ (TOK_FUNCTION CASE ([ (. (TOK_TABLE_OR_COL src_thrift) lstring) 0) '0' (. (TOK_TABLE_OR_COL src_thrift) lstring) TOK_NULL) 0))) (TOK_LIMIT 3))) + (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src_thrift) (TOK_TABLESPLITSAMPLE TOK_ROWCOUNT 3))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION CASE ([ (. (TOK_TABLE_OR_COL src_thrift) lint) 0) 0 (+ ([ (. (TOK_TABLE_OR_COL src_thrift) lint) 0) 1) 1 (+ ([ (. (TOK_TABLE_OR_COL src_thrift) lint) 0) 2) 2 100 5)) (TOK_SELEXPR (TOK_FUNCTION CASE ([ (. (TOK_TABLE_OR_COL src_thrift) lstring) 0) '0' 'zero' '10' (TOK_FUNCTION CONCAT ([ (. (TOK_TABLE_OR_COL src_thrift) lstring) 0) " is ten") 'default')) (TOK_SELEXPR ([ (TOK_FUNCTION CASE ([ (. (TOK_TABLE_OR_COL src_thrift) lstring) 0) '0' (. 
(TOK_TABLE_OR_COL src_thrift) lstring) TOK_NULL) 0))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - src_thrift - TableScan - alias: src_thrift - Select Operator - expressions: - expr: CASE (lint[0]) WHEN (0) THEN ((lint[0] + 1)) WHEN (1) THEN ((lint[0] + 2)) WHEN (2) THEN (100) ELSE (5) END - type: int - expr: CASE (lstring[0]) WHEN ('0') THEN ('zero') WHEN ('10') THEN (concat(lstring[0], ' is ten')) ELSE ('default') END - type: string - expr: CASE (lstring[0]) WHEN ('0') THEN (lstring) ELSE (null) END[0] - type: string - outputColumnNames: _col0, _col1, _col2 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator - limit: 3 + limit: -1 + Processor Tree: + TableScan + alias: src_thrift + Row Limit Per Split: 3 + Select Operator + expressions: + expr: CASE (lint[0]) WHEN (0) THEN ((lint[0] + 1)) WHEN (1) THEN ((lint[0] + 2)) WHEN (2) THEN (100) ELSE (5) END + type: int + expr: CASE (lstring[0]) WHEN ('0') THEN ('zero') WHEN ('10') THEN (concat(lstring[0], ' is ten')) ELSE ('default') END + type: string + expr: CASE (lstring[0]) WHEN ('0') THEN (lstring) ELSE (null) END[0] + type: string + outputColumnNames: _col0, _col1, _col2 + ListSink PREHOOK: query: SELECT CASE src_thrift.lint[0] @@ -86,7 +75,7 @@ PREHOOK: query: SELECT CASE src_thrift.lint[0] WHEN '0' THEN src_thrift.lstring ELSE NULL END)[0] -FROM src_thrift LIMIT 3 +FROM src_thrift tablesample (3 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src_thrift #### A masked pattern was here #### @@ -105,7 +94,7 @@ POSTHOOK: query: SELECT CASE src_thrift.lint[0] WHEN '0' THEN src_thrift.lstring ELSE NULL END)[0] -FROM src_thrift LIMIT 3 +FROM src_thrift tablesample (3 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src_thrift #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_coalesce.q.out b/ql/src/test/results/clientpositive/udf_coalesce.q.out index c7aa3f7..8f9b35f 100644 --- a/ql/src/test/results/clientpositive/udf_coalesce.q.out +++ b/ql/src/test/results/clientpositive/udf_coalesce.q.out @@ -30,7 +30,7 @@ SELECT COALESCE(1), COALESCE(NULL, 2.0, 3.0), COALESCE(2.0, NULL, 3.0), COALESCE(IF(TRUE, NULL, 0), NULL) -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY POSTHOOK: query: EXPLAIN SELECT COALESCE(1), @@ -51,73 +51,62 @@ SELECT COALESCE(1), COALESCE(NULL, 2.0, 3.0), COALESCE(2.0, NULL, 3.0), COALESCE(IF(TRUE, NULL, 0), NULL) -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY ABSTRACT SYNTAX TREE: - (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION COALESCE 1)) (TOK_SELEXPR (TOK_FUNCTION COALESCE 1 2)) (TOK_SELEXPR (TOK_FUNCTION COALESCE TOK_NULL 2)) (TOK_SELEXPR (TOK_FUNCTION COALESCE 1 TOK_NULL)) (TOK_SELEXPR (TOK_FUNCTION COALESCE TOK_NULL TOK_NULL 3)) (TOK_SELEXPR (TOK_FUNCTION COALESCE 4 TOK_NULL TOK_NULL TOK_NULL)) (TOK_SELEXPR (TOK_FUNCTION COALESCE '1')) (TOK_SELEXPR (TOK_FUNCTION COALESCE '1' '2')) (TOK_SELEXPR (TOK_FUNCTION COALESCE TOK_NULL '2')) (TOK_SELEXPR (TOK_FUNCTION COALESCE '1' TOK_NULL)) (TOK_SELEXPR (TOK_FUNCTION COALESCE TOK_NULL TOK_NULL '3')) (TOK_SELEXPR (TOK_FUNCTION COALESCE '4' TOK_NULL 
TOK_NULL TOK_NULL)) (TOK_SELEXPR (TOK_FUNCTION COALESCE 1.0)) (TOK_SELEXPR (TOK_FUNCTION COALESCE 1.0 2.0)) (TOK_SELEXPR (TOK_FUNCTION COALESCE TOK_NULL 2.0)) (TOK_SELEXPR (TOK_FUNCTION COALESCE TOK_NULL 2.0 3.0)) (TOK_SELEXPR (TOK_FUNCTION COALESCE 2.0 TOK_NULL 3.0)) (TOK_SELEXPR (TOK_FUNCTION COALESCE (TOK_FUNCTION IF TRUE TOK_NULL 0) TOK_NULL))) (TOK_LIMIT 1))) + (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src) (TOK_TABLESPLITSAMPLE TOK_ROWCOUNT 1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION COALESCE 1)) (TOK_SELEXPR (TOK_FUNCTION COALESCE 1 2)) (TOK_SELEXPR (TOK_FUNCTION COALESCE TOK_NULL 2)) (TOK_SELEXPR (TOK_FUNCTION COALESCE 1 TOK_NULL)) (TOK_SELEXPR (TOK_FUNCTION COALESCE TOK_NULL TOK_NULL 3)) (TOK_SELEXPR (TOK_FUNCTION COALESCE 4 TOK_NULL TOK_NULL TOK_NULL)) (TOK_SELEXPR (TOK_FUNCTION COALESCE '1')) (TOK_SELEXPR (TOK_FUNCTION COALESCE '1' '2')) (TOK_SELEXPR (TOK_FUNCTION COALESCE TOK_NULL '2')) (TOK_SELEXPR (TOK_FUNCTION COALESCE '1' TOK_NULL)) (TOK_SELEXPR (TOK_FUNCTION COALESCE TOK_NULL TOK_NULL '3')) (TOK_SELEXPR (TOK_FUNCTION COALESCE '4' TOK_NULL TOK_NULL TOK_NULL)) (TOK_SELEXPR (TOK_FUNCTION COALESCE 1.0)) (TOK_SELEXPR (TOK_FUNCTION COALESCE 1.0 2.0)) (TOK_SELEXPR (TOK_FUNCTION COALESCE TOK_NULL 2.0)) (TOK_SELEXPR (TOK_FUNCTION COALESCE TOK_NULL 2.0 3.0)) (TOK_SELEXPR (TOK_FUNCTION COALESCE 2.0 TOK_NULL 3.0)) (TOK_SELEXPR (TOK_FUNCTION COALESCE (TOK_FUNCTION IF TRUE TOK_NULL 0) TOK_NULL))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - src - TableScan - alias: src - Select Operator - expressions: - expr: COALESCE(1) - type: int - expr: COALESCE(1,2) - type: int - expr: COALESCE(null,2) - type: int - expr: COALESCE(1,null) - type: int - expr: COALESCE(null,null,3) - type: int - expr: COALESCE(4,null,null,null) - type: int - expr: COALESCE('1') - type: string - expr: COALESCE('1','2') - type: string - expr: COALESCE(null,'2') - type: string - expr: COALESCE('1',null) - type: string - expr: COALESCE(null,null,'3') - type: string - expr: COALESCE('4',null,null,null) - type: string - expr: COALESCE(1.0) - type: double - expr: COALESCE(1.0,2.0) - type: double - expr: COALESCE(null,2.0) - type: double - expr: COALESCE(null,2.0,3.0) - type: double - expr: COALESCE(2.0,null,3.0) - type: double - expr: COALESCE(if(true, null, 0),null) - type: int - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator - limit: 1 + limit: -1 + Processor Tree: + TableScan + alias: src + Row Limit Per Split: 1 + Select Operator + expressions: + expr: COALESCE(1) + type: int + expr: COALESCE(1,2) + type: int + expr: COALESCE(null,2) + type: int + expr: COALESCE(1,null) + type: int + expr: COALESCE(null,null,3) + type: int + expr: COALESCE(4,null,null,null) + type: int + expr: COALESCE('1') + type: string + expr: COALESCE('1','2') + type: string + expr: COALESCE(null,'2') + type: string + expr: COALESCE('1',null) + type: string + expr: COALESCE(null,null,'3') + type: string + expr: COALESCE('4',null,null,null) + type: string + expr: COALESCE(1.0) + type: double + 
expr: COALESCE(1.0,2.0) + type: double + expr: COALESCE(null,2.0) + type: double + expr: COALESCE(null,2.0,3.0) + type: double + expr: COALESCE(2.0,null,3.0) + type: double + expr: COALESCE(if(true, null, 0),null) + type: int + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17 + ListSink PREHOOK: query: SELECT COALESCE(1), @@ -138,7 +127,7 @@ PREHOOK: query: SELECT COALESCE(1), COALESCE(NULL, 2.0, 3.0), COALESCE(2.0, NULL, 3.0), COALESCE(IF(TRUE, NULL, 0), NULL) -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### @@ -160,7 +149,7 @@ POSTHOOK: query: SELECT COALESCE(1), COALESCE(NULL, 2.0, 3.0), COALESCE(2.0, NULL, 3.0), COALESCE(IF(TRUE, NULL, 0), NULL) -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### @@ -181,36 +170,25 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src_thrift))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION COALESCE ([ (. (TOK_TABLE_OR_COL src_thrift) lint) 1) 999)) (TOK_SELEXPR (TOK_FUNCTION COALESCE (. ([ (. (TOK_TABLE_OR_COL src_thrift) lintstring) 0) mystring) '999')) (TOK_SELEXPR (TOK_FUNCTION COALESCE ([ (. (TOK_TABLE_OR_COL src_thrift) mstringstring) 'key_2') '999'))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - src_thrift - TableScan - alias: src_thrift - Select Operator - expressions: - expr: COALESCE(lint[1],999) - type: int - expr: COALESCE(lintstring[0].mystring,'999') - type: string - expr: COALESCE(mstringstring['key_2'],'999') - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: -1 + Processor Tree: + TableScan + alias: src_thrift + Select Operator + expressions: + expr: COALESCE(lint[1],999) + type: int + expr: COALESCE(lintstring[0].mystring,'999') + type: string + expr: COALESCE(mstringstring['key_2'],'999') + type: string + outputColumnNames: _col0, _col1, _col2 + ListSink PREHOOK: query: SELECT COALESCE(src_thrift.lint[1], 999), diff --git a/ql/src/test/results/clientpositive/udf_concat.q.out b/ql/src/test/results/clientpositive/udf_concat.q.out index f7fe752..6cc4c0f 100644 --- a/ql/src/test/results/clientpositive/udf_concat.q.out +++ b/ql/src/test/results/clientpositive/udf_concat.q.out @@ -23,7 +23,7 @@ PREHOOK: query: SELECT concat(1, 2), concat(1), concat('1234', 'abc', 'extra argument') -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### @@ -38,7 +38,7 @@ POSTHOOK: query: SELECT concat(1, 2), concat(1), concat('1234', 'abc', 'extra argument') -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### @@ -47,7 +47,7 @@ PREHOOK: query: -- binary/mixed SELECT concat(cast('ab' as binary), cast('cd' as binary)), concat('ab', cast('cd' as binary)) -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked 
pattern was here #### @@ -55,7 +55,7 @@ POSTHOOK: query: -- binary/mixed SELECT concat(cast('ab' as binary), cast('cd' as binary)), concat('ab', cast('cd' as binary)) -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_concat_ws.q.out b/ql/src/test/results/clientpositive/udf_concat_ws.q.out index 9fd1d1d..33f822f 100644 --- a/ql/src/test/results/clientpositive/udf_concat_ws.q.out +++ b/ql/src/test/results/clientpositive/udf_concat_ws.q.out @@ -46,38 +46,27 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME dest1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION concat_ws (. (TOK_TABLE_OR_COL dest1) c1) (. (TOK_TABLE_OR_COL dest1) c2) (. (TOK_TABLE_OR_COL dest1) c3))) (TOK_SELEXPR (TOK_FUNCTION concat_ws ',' (. (TOK_TABLE_OR_COL dest1) c1) (. (TOK_TABLE_OR_COL dest1) c2) (. (TOK_TABLE_OR_COL dest1) c3))) (TOK_SELEXPR (TOK_FUNCTION concat_ws TOK_NULL (. (TOK_TABLE_OR_COL dest1) c1) (. (TOK_TABLE_OR_COL dest1) c2) (. (TOK_TABLE_OR_COL dest1) c3))) (TOK_SELEXPR (TOK_FUNCTION concat_ws '**' (. (TOK_TABLE_OR_COL dest1) c1) TOK_NULL (. (TOK_TABLE_OR_COL dest1) c3)))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - dest1 - TableScan - alias: dest1 - Select Operator - expressions: - expr: concat_ws(c1, c2, c3) - type: string - expr: concat_ws(',', c1, c2, c3) - type: string - expr: concat_ws(null, c1, c2, c3) - type: string - expr: concat_ws('**', c1, null, c3) - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: -1 + Processor Tree: + TableScan + alias: dest1 + Select Operator + expressions: + expr: concat_ws(c1, c2, c3) + type: string + expr: concat_ws(',', c1, c2, c3) + type: string + expr: concat_ws(null, c1, c2, c3) + type: string + expr: concat_ws('**', c1, null, c3) + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + ListSink PREHOOK: query: SELECT concat_ws(dest1.c1, dest1.c2, dest1.c3), @@ -106,7 +95,7 @@ SELECT concat_ws('.', array('www', 'face', 'book', 'com'), '1234'), concat_ws('_', array('www', 'face'), array('book', 'com', '1234')), concat_ws('**', 'www', array('face'), array('book', 'com', '1234')), concat_ws('[]', array('www'), 'face', array('book', 'com', '1234')), - concat_ws('AAA', array('www'), array('face', 'book', 'com'), '1234') FROM dest1 LIMIT 1 + concat_ws('AAA', array('www'), array('face', 'book', 'com'), '1234') FROM dest1 tablesample (1 rows) PREHOOK: type: QUERY POSTHOOK: query: -- evalutes function for array of strings EXPLAIN @@ -116,54 +105,43 @@ SELECT concat_ws('.', array('www', 'face', 'book', 'com'), '1234'), concat_ws('_', array('www', 'face'), array('book', 'com', '1234')), concat_ws('**', 'www', array('face'), array('book', 'com', '1234')), concat_ws('[]', array('www'), 'face', array('book', 'com', '1234')), - concat_ws('AAA', array('www'), array('face', 'book', 'com'), '1234') FROM dest1 LIMIT 1 + concat_ws('AAA', array('www'), array('face', 'book', 'com'), '1234') FROM dest1 tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Lineage: 
dest1.c1 SIMPLE [] POSTHOOK: Lineage: dest1.c2 SIMPLE [] POSTHOOK: Lineage: dest1.c3 SIMPLE [] ABSTRACT SYNTAX TREE: - (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME dest1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION concat_ws '.' (TOK_FUNCTION array 'www' 'face' 'book' 'com') '1234')) (TOK_SELEXPR (TOK_FUNCTION concat_ws '-' 'www' (TOK_FUNCTION array 'face' 'book' 'com') '1234')) (TOK_SELEXPR (TOK_FUNCTION concat_ws 'F' 'www' (TOK_FUNCTION array 'face' 'book' 'com' '1234'))) (TOK_SELEXPR (TOK_FUNCTION concat_ws '_' (TOK_FUNCTION array 'www' 'face') (TOK_FUNCTION array 'book' 'com' '1234'))) (TOK_SELEXPR (TOK_FUNCTION concat_ws '**' 'www' (TOK_FUNCTION array 'face') (TOK_FUNCTION array 'book' 'com' '1234'))) (TOK_SELEXPR (TOK_FUNCTION concat_ws '[]' (TOK_FUNCTION array 'www') 'face' (TOK_FUNCTION array 'book' 'com' '1234'))) (TOK_SELEXPR (TOK_FUNCTION concat_ws 'AAA' (TOK_FUNCTION array 'www') (TOK_FUNCTION array 'face' 'book' 'com') '1234'))) (TOK_LIMIT 1))) + (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME dest1) (TOK_TABLESPLITSAMPLE TOK_ROWCOUNT 1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION concat_ws '.' (TOK_FUNCTION array 'www' 'face' 'book' 'com') '1234')) (TOK_SELEXPR (TOK_FUNCTION concat_ws '-' 'www' (TOK_FUNCTION array 'face' 'book' 'com') '1234')) (TOK_SELEXPR (TOK_FUNCTION concat_ws 'F' 'www' (TOK_FUNCTION array 'face' 'book' 'com' '1234'))) (TOK_SELEXPR (TOK_FUNCTION concat_ws '_' (TOK_FUNCTION array 'www' 'face') (TOK_FUNCTION array 'book' 'com' '1234'))) (TOK_SELEXPR (TOK_FUNCTION concat_ws '**' 'www' (TOK_FUNCTION array 'face') (TOK_FUNCTION array 'book' 'com' '1234'))) (TOK_SELEXPR (TOK_FUNCTION concat_ws '[]' (TOK_FUNCTION array 'www') 'face' (TOK_FUNCTION array 'book' 'com' '1234'))) (TOK_SELEXPR (TOK_FUNCTION concat_ws 'AAA' (TOK_FUNCTION array 'www') (TOK_FUNCTION array 'face' 'book' 'com') '1234'))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - dest1 - TableScan - alias: dest1 - Select Operator - expressions: - expr: concat_ws('.', array('www','face','book','com'), '1234') - type: string - expr: concat_ws('-', 'www', array('face','book','com'), '1234') - type: string - expr: concat_ws('F', 'www', array('face','book','com','1234')) - type: string - expr: concat_ws('_', array('www','face'), array('book','com','1234')) - type: string - expr: concat_ws('**', 'www', array('face'), array('book','com','1234')) - type: string - expr: concat_ws('[]', array('www'), 'face', array('book','com','1234')) - type: string - expr: concat_ws('AAA', array('www'), array('face','book','com'), '1234') - type: string - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator - limit: 1 + limit: -1 + Processor Tree: + TableScan + alias: dest1 + Row Limit Per Split: 1 + Select Operator + expressions: + expr: concat_ws('.', array('www','face','book','com'), '1234') + type: string + expr: concat_ws('-', 'www', array('face','book','com'), '1234') + type: string + expr: concat_ws('F', 'www', array('face','book','com','1234')) + type: string + expr: concat_ws('_', array('www','face'), 
array('book','com','1234')) + type: string + expr: concat_ws('**', 'www', array('face'), array('book','com','1234')) + type: string + expr: concat_ws('[]', array('www'), 'face', array('book','com','1234')) + type: string + expr: concat_ws('AAA', array('www'), array('face','book','com'), '1234') + type: string + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6 + ListSink PREHOOK: query: SELECT concat_ws('.', array('www', 'face', 'book', 'com'), '1234'), @@ -172,7 +150,7 @@ PREHOOK: query: SELECT concat_ws('.', array('www', 'face', 'book', 'com'), '1234 concat_ws('_', array('www', 'face'), array('book', 'com', '1234')), concat_ws('**', 'www', array('face'), array('book', 'com', '1234')), concat_ws('[]', array('www'), 'face', array('book', 'com', '1234')), - concat_ws('AAA', array('www'), array('face', 'book', 'com'), '1234') FROM dest1 LIMIT 1 + concat_ws('AAA', array('www'), array('face', 'book', 'com'), '1234') FROM dest1 tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@dest1 #### A masked pattern was here #### @@ -182,7 +160,7 @@ POSTHOOK: query: SELECT concat_ws('.', array('www', 'face', 'book', 'com'), '123 concat_ws('_', array('www', 'face'), array('book', 'com', '1234')), concat_ws('**', 'www', array('face'), array('book', 'com', '1234')), concat_ws('[]', array('www'), 'face', array('book', 'com', '1234')), - concat_ws('AAA', array('www'), array('face', 'book', 'com'), '1234') FROM dest1 LIMIT 1 + concat_ws('AAA', array('www'), array('face', 'book', 'com'), '1234') FROM dest1 tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@dest1 #### A masked pattern was here #### @@ -196,7 +174,7 @@ PREHOOK: query: SELECT concat_ws(NULL, array('www', 'face', 'book', 'com'), '123 concat_ws(NULL, array('www', 'face'), array('book', 'com', '1234')), concat_ws(NULL, 'www', array('face'), array('book', 'com', '1234')), concat_ws(NULL, array('www'), 'face', array('book', 'com', '1234')), - concat_ws(NULL, array('www'), array('face', 'book', 'com'), '1234') FROM dest1 LIMIT 1 + concat_ws(NULL, array('www'), array('face', 'book', 'com'), '1234') FROM dest1 tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@dest1 #### A masked pattern was here #### @@ -206,7 +184,7 @@ POSTHOOK: query: SELECT concat_ws(NULL, array('www', 'face', 'book', 'com'), '12 concat_ws(NULL, array('www', 'face'), array('book', 'com', '1234')), concat_ws(NULL, 'www', array('face'), array('book', 'com', '1234')), concat_ws(NULL, array('www'), 'face', array('book', 'com', '1234')), - concat_ws(NULL, array('www'), array('face', 'book', 'com'), '1234') FROM dest1 LIMIT 1 + concat_ws(NULL, array('www'), array('face', 'book', 'com'), '1234') FROM dest1 tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@dest1 #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_conv.q.out b/ql/src/test/results/clientpositive/udf_conv.q.out index 010e192..09b256d 100644 --- a/ql/src/test/results/clientpositive/udf_conv.q.out +++ b/ql/src/test/results/clientpositive/udf_conv.q.out @@ -22,7 +22,7 @@ SELECT conv('22', 10, 10), conv('110011', 2, 16), conv('facebook', 36, 16) -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### @@ -34,7 +34,7 @@ SELECT conv('22', 10, 10), conv('110011', 2, 16), conv('facebook', 36, 16) -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### @@ -46,7 +46,7 
@@ SELECT conv('1011', 2, -16), conv('-1', 10, 16), conv('-15', 10, 16) -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### @@ -57,7 +57,7 @@ SELECT conv('1011', 2, -16), conv('-1', 10, 16), conv('-15', 10, 16) -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### @@ -69,7 +69,7 @@ SELECT conv('9223372036854775807', 36, -16), conv('-9223372036854775807', 36, 16), conv('-9223372036854775807', 36, -16) -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### @@ -80,7 +80,7 @@ SELECT conv('9223372036854775807', 36, -16), conv('-9223372036854775807', 36, 16), conv('-9223372036854775807', 36, -16) -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### @@ -93,7 +93,7 @@ SELECT conv('131', 1, 5), conv('515', 5, 100), conv('10', -2, 2) -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### @@ -105,7 +105,7 @@ SELECT conv('131', 1, 5), conv('515', 5, 100), conv('10', -2, 2) -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### @@ -116,7 +116,7 @@ SELECT conv(4521, 10, 36), conv(22, 10, 10), conv(110011, 2, 16) -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### @@ -126,7 +126,7 @@ SELECT conv(4521, 10, 36), conv(22, 10, 10), conv(110011, 2, 16) -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### @@ -136,7 +136,7 @@ PREHOOK: query: SELECT conv(1011, 2, -16), conv(-1, 10, 16), conv(-15, 10, 16) -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### @@ -145,7 +145,7 @@ POSTHOOK: query: SELECT conv(1011, 2, -16), conv(-1, 10, 16), conv(-15, 10, 16) -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### @@ -155,7 +155,7 @@ PREHOOK: query: SELECT conv(9223372036854775807, 36, -16), conv(-9223372036854775807, 36, 16), conv(-9223372036854775807, 36, -16) -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### @@ -164,7 +164,7 @@ POSTHOOK: query: SELECT conv(9223372036854775807, 36, -16), conv(-9223372036854775807, 36, 16), conv(-9223372036854775807, 36, -16) -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### @@ -174,7 +174,7 @@ PREHOOK: query: SELECT conv(131, 1, 5), conv(515, 5, 100), conv('10', -2, 2) -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### @@ -183,7 +183,7 @@ POSTHOOK: query: SELECT conv(131, 1, 5), conv(515, 5, 100), conv('10', -2, 2) -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### @@ -192,7 +192,7 @@ PREHOOK: query: -- Make sure that state is properly reset. 
SELECT conv(key, 10, 16), conv(key, 16, 10) -FROM src LIMIT 3 +FROM src tablesample (3 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### @@ -200,7 +200,7 @@ POSTHOOK: query: -- Make sure that state is properly reset. SELECT conv(key, 10, 16), conv(key, 16, 10) -FROM src LIMIT 3 +FROM src tablesample (3 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_cos.q.out b/ql/src/test/results/clientpositive/udf_cos.q.out index dd0564c..02ba24e 100644 --- a/ql/src/test/results/clientpositive/udf_cos.q.out +++ b/ql/src/test/results/clientpositive/udf_cos.q.out @@ -12,23 +12,23 @@ Example: > SELECT cos(0) FROM src LIMIT 1; 1 PREHOOK: query: SELECT cos(null) -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### POSTHOOK: query: SELECT cos(null) -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### NULL PREHOOK: query: SELECT cos(0.98), cos(1.57), cos(-0.5) -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### POSTHOOK: query: SELECT cos(0.98), cos(1.57), cos(-0.5) -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_degrees.q.out b/ql/src/test/results/clientpositive/udf_degrees.q.out index e171df4..09f5582 100644 --- a/ql/src/test/results/clientpositive/udf_degrees.q.out +++ b/ql/src/test/results/clientpositive/udf_degrees.q.out @@ -1,47 +1,36 @@ -PREHOOK: query: explain -select degrees(PI()) FROM src LIMIT 1 +PREHOOK: query: explain +select degrees(PI()) FROM src tablesample (1 rows) PREHOOK: type: QUERY -POSTHOOK: query: explain -select degrees(PI()) FROM src LIMIT 1 +POSTHOOK: query: explain +select degrees(PI()) FROM src tablesample (1 rows) POSTHOOK: type: QUERY ABSTRACT SYNTAX TREE: - (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION degrees (TOK_FUNCTION PI)))) (TOK_LIMIT 1))) + (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src) (TOK_TABLESPLITSAMPLE TOK_ROWCOUNT 1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION degrees (TOK_FUNCTION PI)))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - src - TableScan - alias: src - Select Operator - expressions: - expr: degrees(pi()) - type: double - outputColumnNames: _col0 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator - limit: 1 + limit: -1 + Processor Tree: + TableScan + alias: src + Row Limit Per Split: 1 + Select Operator + expressions: + expr: degrees(pi()) + type: double + outputColumnNames: _col0 + ListSink -PREHOOK: query: select degrees(PI()) FROM src LIMIT 1 +PREHOOK: query: select degrees(PI()) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: select degrees(PI()) FROM src 
LIMIT 1 +POSTHOOK: query: select degrees(PI()) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### @@ -61,49 +50,38 @@ Example: -1 PREHOOK: query: explain -select degrees(PI()) FROM src LIMIT 1 +select degrees(PI()) FROM src tablesample (1 rows) PREHOOK: type: QUERY POSTHOOK: query: explain -select degrees(PI()) FROM src LIMIT 1 +select degrees(PI()) FROM src tablesample (1 rows) POSTHOOK: type: QUERY ABSTRACT SYNTAX TREE: - (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION degrees (TOK_FUNCTION PI)))) (TOK_LIMIT 1))) + (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src) (TOK_TABLESPLITSAMPLE TOK_ROWCOUNT 1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION degrees (TOK_FUNCTION PI)))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - src - TableScan - alias: src - Select Operator - expressions: - expr: degrees(pi()) - type: double - outputColumnNames: _col0 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator - limit: 1 + limit: -1 + Processor Tree: + TableScan + alias: src + Row Limit Per Split: 1 + Select Operator + expressions: + expr: degrees(pi()) + type: double + outputColumnNames: _col0 + ListSink -PREHOOK: query: select degrees(PI()) FROM src LIMIT 1 +PREHOOK: query: select degrees(PI()) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: select degrees(PI()) FROM src LIMIT 1 +POSTHOOK: query: select degrees(PI()) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_div.q.out b/ql/src/test/results/clientpositive/udf_div.q.out index 2398b5c..d200e0c 100644 --- a/ql/src/test/results/clientpositive/udf_div.q.out +++ b/ql/src/test/results/clientpositive/udf_div.q.out @@ -11,11 +11,11 @@ a div b - Divide a by b rounded to the long integer Example: > SELECT 3 div 2 FROM src LIMIT 1; 1 -PREHOOK: query: SELECT 3 DIV 2 FROM SRC LIMIT 1 +PREHOOK: query: SELECT 3 DIV 2 FROM SRC tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT 3 DIV 2 FROM SRC LIMIT 1 +POSTHOOK: query: SELECT 3 DIV 2 FROM SRC tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_divide.q.out b/ql/src/test/results/clientpositive/udf_divide.q.out index e346b00..e0594cf 100644 --- a/ql/src/test/results/clientpositive/udf_divide.q.out +++ b/ql/src/test/results/clientpositive/udf_divide.q.out @@ -11,11 +11,11 @@ a / b - Divide a by b Example: > SELECT 3 / 2 FROM src LIMIT 1; 1.5 -PREHOOK: query: SELECT 3 / 2 FROM SRC LIMIT 1 +PREHOOK: query: SELECT 3 / 2 FROM SRC tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT 3 / 2 FROM SRC LIMIT 1 +POSTHOOK: query: SELECT 3 / 2 FROM SRC tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: 
default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_elt.q.out b/ql/src/test/results/clientpositive/udf_elt.q.out index 68c7b84..2f14410 100644 --- a/ql/src/test/results/clientpositive/udf_elt.q.out +++ b/ql/src/test/results/clientpositive/udf_elt.q.out @@ -23,7 +23,7 @@ SELECT elt(2, 'abc', 'defg'), elt(null, 'abc', 'defg'), elt(0, 'abc', 'defg'), elt(3, 'abc', 'defg') -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY POSTHOOK: query: EXPLAIN SELECT elt(2, 'abc', 'defg'), @@ -37,59 +37,48 @@ SELECT elt(2, 'abc', 'defg'), elt(null, 'abc', 'defg'), elt(0, 'abc', 'defg'), elt(3, 'abc', 'defg') -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY ABSTRACT SYNTAX TREE: - (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION elt 2 'abc' 'defg')) (TOK_SELEXPR (TOK_FUNCTION elt 3 'aa' 'bb' 'cc' 'dd' 'ee' 'ff' 'gg')) (TOK_SELEXPR (TOK_FUNCTION elt '1' 'abc' 'defg')) (TOK_SELEXPR (TOK_FUNCTION elt 2 'aa' (TOK_FUNCTION TOK_TINYINT '2'))) (TOK_SELEXPR (TOK_FUNCTION elt 2 'aa' (TOK_FUNCTION TOK_SMALLINT '12345'))) (TOK_SELEXPR (TOK_FUNCTION elt 2 'aa' (TOK_FUNCTION TOK_BIGINT '123456789012'))) (TOK_SELEXPR (TOK_FUNCTION elt 2 'aa' (TOK_FUNCTION TOK_FLOAT 1.25))) (TOK_SELEXPR (TOK_FUNCTION elt 2 'aa' (TOK_FUNCTION TOK_DOUBLE 16.0))) (TOK_SELEXPR (TOK_FUNCTION elt TOK_NULL 'abc' 'defg')) (TOK_SELEXPR (TOK_FUNCTION elt 0 'abc' 'defg')) (TOK_SELEXPR (TOK_FUNCTION elt 3 'abc' 'defg'))) (TOK_LIMIT 1))) + (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src) (TOK_TABLESPLITSAMPLE TOK_ROWCOUNT 1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION elt 2 'abc' 'defg')) (TOK_SELEXPR (TOK_FUNCTION elt 3 'aa' 'bb' 'cc' 'dd' 'ee' 'ff' 'gg')) (TOK_SELEXPR (TOK_FUNCTION elt '1' 'abc' 'defg')) (TOK_SELEXPR (TOK_FUNCTION elt 2 'aa' (TOK_FUNCTION TOK_TINYINT '2'))) (TOK_SELEXPR (TOK_FUNCTION elt 2 'aa' (TOK_FUNCTION TOK_SMALLINT '12345'))) (TOK_SELEXPR (TOK_FUNCTION elt 2 'aa' (TOK_FUNCTION TOK_BIGINT '123456789012'))) (TOK_SELEXPR (TOK_FUNCTION elt 2 'aa' (TOK_FUNCTION TOK_FLOAT 1.25))) (TOK_SELEXPR (TOK_FUNCTION elt 2 'aa' (TOK_FUNCTION TOK_DOUBLE 16.0))) (TOK_SELEXPR (TOK_FUNCTION elt TOK_NULL 'abc' 'defg')) (TOK_SELEXPR (TOK_FUNCTION elt 0 'abc' 'defg')) (TOK_SELEXPR (TOK_FUNCTION elt 3 'abc' 'defg'))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - src - TableScan - alias: src - Select Operator - expressions: - expr: elt(2, 'abc', 'defg') - type: string - expr: elt(3, 'aa', 'bb', 'cc', 'dd', 'ee', 'ff', 'gg') - type: string - expr: elt('1', 'abc', 'defg') - type: string - expr: elt(2, 'aa', UDFToByte('2')) - type: string - expr: elt(2, 'aa', UDFToShort('12345')) - type: string - expr: elt(2, 'aa', UDFToLong('123456789012')) - type: string - expr: elt(2, 'aa', UDFToFloat(1.25)) - type: string - expr: elt(2, 'aa', 16.0) - type: string - expr: elt(null, 'abc', 'defg') - type: string - expr: elt(0, 'abc', 'defg') - type: string - expr: elt(3, 'abc', 'defg') - type: string - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator - limit: 1 + limit: -1 + Processor Tree: + TableScan + alias: src + Row Limit Per Split: 1 + Select Operator + expressions: + expr: elt(2, 'abc', 'defg') + type: string + expr: elt(3, 'aa', 'bb', 'cc', 'dd', 'ee', 'ff', 'gg') + type: string + expr: elt('1', 'abc', 'defg') + type: string + expr: elt(2, 'aa', UDFToByte('2')) + type: string + expr: elt(2, 'aa', UDFToShort('12345')) + type: string + expr: elt(2, 'aa', UDFToLong('123456789012')) + type: string + expr: elt(2, 'aa', UDFToFloat(1.25)) + type: string + expr: elt(2, 'aa', 16.0) + type: string + expr: elt(null, 'abc', 'defg') + type: string + expr: elt(0, 'abc', 'defg') + type: string + expr: elt(3, 'abc', 'defg') + type: string + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10 + ListSink PREHOOK: query: SELECT elt(2, 'abc', 'defg'), @@ -103,7 +92,7 @@ PREHOOK: query: SELECT elt(2, 'abc', 'defg'), elt(null, 'abc', 'defg'), elt(0, 'abc', 'defg'), elt(3, 'abc', 'defg') -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### @@ -118,7 +107,7 @@ POSTHOOK: query: SELECT elt(2, 'abc', 'defg'), elt(null, 'abc', 'defg'), elt(0, 'abc', 'defg'), elt(3, 'abc', 'defg') -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_equal.q.out b/ql/src/test/results/clientpositive/udf_equal.q.out index 9e1cb61..0aad354 100644 --- a/ql/src/test/results/clientpositive/udf_equal.q.out +++ b/ql/src/test/results/clientpositive/udf_equal.q.out @@ -20,11 +20,11 @@ POSTHOOK: query: DESCRIBE FUNCTION EXTENDED == POSTHOOK: type: DESCFUNCTION a == b - Returns TRUE if a equals b and false otherwise Synonyms: = -PREHOOK: query: SELECT true=false, false=true, false=false, true=true, NULL=NULL, true=NULL, NULL=true, false=NULL, NULL=false FROM src LIMIT 1 +PREHOOK: query: SELECT true=false, false=true, false=false, true=true, NULL=NULL, true=NULL, NULL=true, false=NULL, NULL=false FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT true=false, false=true, false=false, true=true, NULL=NULL, true=NULL, NULL=true, false=NULL, NULL=false FROM src LIMIT 1 +POSTHOOK: query: SELECT true=false, false=true, false=false, true=true, NULL=NULL, true=NULL, NULL=true, false=NULL, NULL=false FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### @@ -39,11 +39,11 @@ PREHOOK: type: DESCFUNCTION POSTHOOK: query: DESCRIBE FUNCTION EXTENDED <=> POSTHOOK: type: DESCFUNCTION a <=> b - Returns same result with EQUAL(=) operator for non-null operands, but returns TRUE if both are NULL, FALSE if one of the them is NULL -PREHOOK: query: SELECT true<=>false, false<=>true, false<=>false, true<=>true, NULL<=>NULL, true<=>NULL, NULL<=>true, false<=>NULL, NULL<=>false FROM src LIMIT 1 +PREHOOK: query: SELECT true<=>false, false<=>true, false<=>false, true<=>true, NULL<=>NULL, true<=>NULL, NULL<=>true, false<=>NULL, NULL<=>false FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT true<=>false, false<=>true, false<=>false, true<=>true, NULL<=>NULL, true<=>NULL, NULL<=>true, false<=>NULL, NULL<=>false FROM src LIMIT 1 +POSTHOOK: 
query: SELECT true<=>false, false<=>true, false<=>false, true<=>true, NULL<=>NULL, true<=>NULL, NULL<=>true, false<=>NULL, NULL<=>false FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_explode.q.out b/ql/src/test/results/clientpositive/udf_explode.q.out index 841c8eb..6fcdacd 100644 --- a/ql/src/test/results/clientpositive/udf_explode.q.out +++ b/ql/src/test/results/clientpositive/udf_explode.q.out @@ -8,12 +8,12 @@ PREHOOK: type: DESCFUNCTION POSTHOOK: query: DESCRIBE FUNCTION EXTENDED explode POSTHOOK: type: DESCFUNCTION explode(a) - separates the elements of array a into multiple rows, or the elements of a map into multiple rows and columns -PREHOOK: query: EXPLAIN EXTENDED SELECT explode(array(1,2,3)) AS myCol FROM src LIMIT 3 +PREHOOK: query: EXPLAIN EXTENDED SELECT explode(array(1,2,3)) AS myCol FROM src tablesample (1 rows) PREHOOK: type: QUERY -POSTHOOK: query: EXPLAIN EXTENDED SELECT explode(array(1,2,3)) AS myCol FROM src LIMIT 3 +POSTHOOK: query: EXPLAIN EXTENDED SELECT explode(array(1,2,3)) AS myCol FROM src tablesample (1 rows) POSTHOOK: type: QUERY ABSTRACT SYNTAX TREE: - (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION explode (TOK_FUNCTION array 1 2 3)) myCol)) (TOK_LIMIT 3))) + (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src) (TOK_TABLESPLITSAMPLE TOK_ROWCOUNT 1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION explode (TOK_FUNCTION array 1 2 3)) myCol)))) STAGE DEPENDENCIES: Stage-1 is a root stage @@ -26,6 +26,7 @@ STAGE PLANS: src TableScan alias: src + Row Limit Per Split: 1 GatherStats: false Select Operator expressions: @@ -34,27 +35,26 @@ STAGE PLANS: outputColumnNames: _col0 UDTF Operator function name: explode - Limit - File Output Operator - compressed: false - GlobalTableId: 0 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - columns col - columns.types int - escape.delim \ - hive.serialization.extend.nesting.levels true - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - TotalFiles: 1 - GatherStats: false - MultiFileSpray: false + File Output Operator + compressed: false + GlobalTableId: 0 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + columns col + columns.types int + escape.delim \ + hive.serialization.extend.nesting.levels true + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + TotalFiles: 1 + GatherStats: false + MultiFileSpray: false Path -> Alias: #### A masked pattern was here #### Path -> Partition: @@ -105,19 +105,18 @@ STAGE PLANS: Stage: Stage-0 Fetch Operator - limit: 3 + limit: -1 -PREHOOK: query: EXPLAIN EXTENDED SELECT a.myCol, count(1) FROM (SELECT explode(array(1,2,3)) AS myCol FROM src LIMIT 3) a GROUP BY a.myCol +PREHOOK: query: EXPLAIN EXTENDED SELECT a.myCol, 
count(1) FROM (SELECT explode(array(1,2,3)) AS myCol FROM src tablesample (1 rows)) a GROUP BY a.myCol PREHOOK: type: QUERY -POSTHOOK: query: EXPLAIN EXTENDED SELECT a.myCol, count(1) FROM (SELECT explode(array(1,2,3)) AS myCol FROM src LIMIT 3) a GROUP BY a.myCol +POSTHOOK: query: EXPLAIN EXTENDED SELECT a.myCol, count(1) FROM (SELECT explode(array(1,2,3)) AS myCol FROM src tablesample (1 rows)) a GROUP BY a.myCol POSTHOOK: type: QUERY ABSTRACT SYNTAX TREE: - (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION explode (TOK_FUNCTION array 1 2 3)) myCol)) (TOK_LIMIT 3))) a)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) myCol)) (TOK_SELEXPR (TOK_FUNCTION count 1))) (TOK_GROUPBY (. (TOK_TABLE_OR_COL a) myCol)))) + (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src) (TOK_TABLESPLITSAMPLE TOK_ROWCOUNT 1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION explode (TOK_FUNCTION array 1 2 3)) myCol)))) a)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) myCol)) (TOK_SELEXPR (TOK_FUNCTION count 1))) (TOK_GROUPBY (. (TOK_TABLE_OR_COL a) myCol)))) STAGE DEPENDENCIES: Stage-1 is a root stage - Stage-2 depends on stages: Stage-1 Stage-0 is a root stage STAGE PLANS: @@ -127,6 +126,7 @@ STAGE PLANS: a:src TableScan alias: src + Row Limit Per Split: 1 GatherStats: false Select Operator expressions: @@ -135,13 +135,32 @@ STAGE PLANS: outputColumnNames: _col0 UDTF Operator function name: explode - Limit - Reduce Output Operator - sort order: - tag: -1 - value expressions: + Select Operator + expressions: + expr: col + type: int + outputColumnNames: col + Group By Operator + aggregations: + expr: count(1) + bucketGroup: false + keys: expr: col type: int + mode: hash + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 + type: int + sort order: + + Map-reduce partition columns: + expr: _col0 + type: int + tag: -1 + value expressions: + expr: _col1 + type: bigint Path -> Alias: #### A masked pattern was here #### Path -> Partition: @@ -191,85 +210,6 @@ STAGE PLANS: /src [a:src] Needs Tagging: false Reduce Operator Tree: - Extract - Limit - Select Operator - expressions: - expr: _col0 - type: int - outputColumnNames: _col0 - Group By Operator - aggregations: - expr: count(1) - bucketGroup: false - keys: - expr: _col0 - type: int - mode: hash - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - properties: - columns _col0,_col1 - columns.types int,bigint - escape.delim \ - serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe - serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe - TotalFiles: 1 - GatherStats: false - MultiFileSpray: false - - Stage: Stage-2 - Map Reduce - Alias -> Map Operator Tree: -#### A masked pattern was here #### - TableScan - GatherStats: false - Reduce Output Operator - key expressions: - expr: _col0 - type: int - sort order: + - Map-reduce partition columns: - expr: _col0 - type: int - tag: -1 - value expressions: - expr: _col1 - type: bigint - Path -> 
Alias: -#### A masked pattern was here #### - Path -> Partition: -#### A masked pattern was here #### - Partition - base file name: -mr-10002 - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - properties: - columns _col0,_col1 - columns.types int,bigint - escape.delim \ - serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe - serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe - - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - properties: - columns _col0,_col1 - columns.types int,bigint - escape.delim \ - serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe - serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe - Truncated Path -> Alias: -#### A masked pattern was here #### - Needs Tagging: false - Reduce Operator Tree: Group By Operator aggregations: expr: count(VALUE._col0) @@ -312,45 +252,45 @@ STAGE PLANS: limit: -1 -PREHOOK: query: SELECT explode(array(1,2,3)) AS myCol FROM src LIMIT 3 +PREHOOK: query: SELECT explode(array(1,2,3)) AS myCol FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT explode(array(1,2,3)) AS myCol FROM src LIMIT 3 +POSTHOOK: query: SELECT explode(array(1,2,3)) AS myCol FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### 1 2 3 -PREHOOK: query: SELECT explode(array(1,2,3)) AS (myCol) FROM src LIMIT 3 +PREHOOK: query: SELECT explode(array(1,2,3)) AS (myCol) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT explode(array(1,2,3)) AS (myCol) FROM src LIMIT 3 +POSTHOOK: query: SELECT explode(array(1,2,3)) AS (myCol) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### 1 2 3 -PREHOOK: query: SELECT a.myCol, count(1) FROM (SELECT explode(array(1,2,3)) AS myCol FROM src LIMIT 3) a GROUP BY a.myCol +PREHOOK: query: SELECT a.myCol, count(1) FROM (SELECT explode(array(1,2,3)) AS myCol FROM src tablesample (1 rows)) a GROUP BY a.myCol PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT a.myCol, count(1) FROM (SELECT explode(array(1,2,3)) AS myCol FROM src LIMIT 3) a GROUP BY a.myCol +POSTHOOK: query: SELECT a.myCol, count(1) FROM (SELECT explode(array(1,2,3)) AS myCol FROM src tablesample (1 rows)) a GROUP BY a.myCol POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### 1 1 2 1 3 1 -PREHOOK: query: EXPLAIN EXTENDED SELECT explode(map(1,'one',2,'two',3,'three')) AS (key,val) FROM src LIMIT 3 +PREHOOK: query: EXPLAIN EXTENDED SELECT explode(map(1,'one',2,'two',3,'three')) AS (key,val) FROM src tablesample (1 rows) PREHOOK: type: QUERY -POSTHOOK: query: EXPLAIN EXTENDED SELECT explode(map(1,'one',2,'two',3,'three')) AS (key,val) FROM src LIMIT 3 +POSTHOOK: query: EXPLAIN EXTENDED SELECT explode(map(1,'one',2,'two',3,'three')) AS (key,val) FROM src tablesample (1 rows) POSTHOOK: type: QUERY ABSTRACT SYNTAX TREE: - (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION explode (TOK_FUNCTION map 1 'one' 2 'two' 3 'three')) key val)) (TOK_LIMIT 3))) + 
(TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src) (TOK_TABLESPLITSAMPLE TOK_ROWCOUNT 1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION explode (TOK_FUNCTION map 1 'one' 2 'two' 3 'three')) key val)))) STAGE DEPENDENCIES: Stage-1 is a root stage @@ -363,6 +303,7 @@ STAGE PLANS: src TableScan alias: src + Row Limit Per Split: 1 GatherStats: false Select Operator expressions: @@ -371,27 +312,26 @@ STAGE PLANS: outputColumnNames: _col0 UDTF Operator function name: explode - Limit - File Output Operator - compressed: false - GlobalTableId: 0 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - columns key,value - columns.types int:string - escape.delim \ - hive.serialization.extend.nesting.levels true - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - TotalFiles: 1 - GatherStats: false - MultiFileSpray: false + File Output Operator + compressed: false + GlobalTableId: 0 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + columns key,value + columns.types int:string + escape.delim \ + hive.serialization.extend.nesting.levels true + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + TotalFiles: 1 + GatherStats: false + MultiFileSpray: false Path -> Alias: #### A masked pattern was here #### Path -> Partition: @@ -442,19 +382,18 @@ STAGE PLANS: Stage: Stage-0 Fetch Operator - limit: 3 + limit: -1 -PREHOOK: query: EXPLAIN EXTENDED SELECT a.key, a.val, count(1) FROM (SELECT explode(map(1,'one',2,'two',3,'three')) AS (key,val) FROM src LIMIT 3) a GROUP BY a.key, a.val +PREHOOK: query: EXPLAIN EXTENDED SELECT a.key, a.val, count(1) FROM (SELECT explode(map(1,'one',2,'two',3,'three')) AS (key,val) FROM src tablesample (1 rows)) a GROUP BY a.key, a.val PREHOOK: type: QUERY -POSTHOOK: query: EXPLAIN EXTENDED SELECT a.key, a.val, count(1) FROM (SELECT explode(map(1,'one',2,'two',3,'three')) AS (key,val) FROM src LIMIT 3) a GROUP BY a.key, a.val +POSTHOOK: query: EXPLAIN EXTENDED SELECT a.key, a.val, count(1) FROM (SELECT explode(map(1,'one',2,'two',3,'three')) AS (key,val) FROM src tablesample (1 rows)) a GROUP BY a.key, a.val POSTHOOK: type: QUERY ABSTRACT SYNTAX TREE: - (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION explode (TOK_FUNCTION map 1 'one' 2 'two' 3 'three')) key val)) (TOK_LIMIT 3))) a)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) val)) (TOK_SELEXPR (TOK_FUNCTION count 1))) (TOK_GROUPBY (. (TOK_TABLE_OR_COL a) key) (. 
(TOK_TABLE_OR_COL a) val)))) + (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src) (TOK_TABLESPLITSAMPLE TOK_ROWCOUNT 1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION explode (TOK_FUNCTION map 1 'one' 2 'two' 3 'three')) key val)))) a)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) val)) (TOK_SELEXPR (TOK_FUNCTION count 1))) (TOK_GROUPBY (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL a) val)))) STAGE DEPENDENCIES: Stage-1 is a root stage - Stage-2 depends on stages: Stage-1 Stage-0 is a root stage STAGE PLANS: @@ -464,6 +403,7 @@ STAGE PLANS: a:src TableScan alias: src + Row Limit Per Split: 1 GatherStats: false Select Operator expressions: @@ -472,15 +412,40 @@ STAGE PLANS: outputColumnNames: _col0 UDTF Operator function name: explode - Limit - Reduce Output Operator - sort order: - tag: -1 - value expressions: + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + outputColumnNames: key, value + Group By Operator + aggregations: + expr: count(1) + bucketGroup: false + keys: expr: key type: int expr: value type: string + mode: hash + outputColumnNames: _col0, _col1, _col2 + Reduce Output Operator + key expressions: + expr: _col0 + type: int + expr: _col1 + type: string + sort order: ++ + Map-reduce partition columns: + expr: _col0 + type: int + expr: _col1 + type: string + tag: -1 + value expressions: + expr: _col2 + type: bigint Path -> Alias: #### A masked pattern was here #### Path -> Partition: @@ -530,93 +495,6 @@ STAGE PLANS: /src [a:src] Needs Tagging: false Reduce Operator Tree: - Extract - Limit - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - Group By Operator - aggregations: - expr: count(1) - bucketGroup: false - keys: - expr: _col0 - type: int - expr: _col1 - type: string - mode: hash - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 0 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - properties: - columns _col0,_col1,_col2 - columns.types int,string,bigint - escape.delim \ - serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe - serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe - TotalFiles: 1 - GatherStats: false - MultiFileSpray: false - - Stage: Stage-2 - Map Reduce - Alias -> Map Operator Tree: -#### A masked pattern was here #### - TableScan - GatherStats: false - Reduce Output Operator - key expressions: - expr: _col0 - type: int - expr: _col1 - type: string - sort order: ++ - Map-reduce partition columns: - expr: _col0 - type: int - expr: _col1 - type: string - tag: -1 - value expressions: - expr: _col2 - type: bigint - Path -> Alias: -#### A masked pattern was here #### - Path -> Partition: -#### A masked pattern was here #### - Partition - base file name: -mr-10002 - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - properties: - columns _col0,_col1,_col2 - columns.types int,string,bigint - escape.delim \ - serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe - serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe 
- - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - properties: - columns _col0,_col1,_col2 - columns.types int,string,bigint - escape.delim \ - serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe - serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe - Truncated Path -> Alias: -#### A masked pattern was here #### - Needs Tagging: false - Reduce Operator Tree: Group By Operator aggregations: expr: count(VALUE._col0) @@ -663,22 +541,22 @@ STAGE PLANS: limit: -1 -PREHOOK: query: SELECT explode(map(1,'one',2,'two',3,'three')) AS (key,val) FROM src LIMIT 3 +PREHOOK: query: SELECT explode(map(1,'one',2,'two',3,'three')) AS (key,val) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT explode(map(1,'one',2,'two',3,'three')) AS (key,val) FROM src LIMIT 3 +POSTHOOK: query: SELECT explode(map(1,'one',2,'two',3,'three')) AS (key,val) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### 1 one 2 two 3 three -PREHOOK: query: SELECT a.key, a.val, count(1) FROM (SELECT explode(map(1,'one',2,'two',3,'three')) AS (key,val) FROM src LIMIT 3) a GROUP BY a.key, a.val +PREHOOK: query: SELECT a.key, a.val, count(1) FROM (SELECT explode(map(1,'one',2,'two',3,'three')) AS (key,val) FROM src tablesample (1 rows)) a GROUP BY a.key, a.val PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT a.key, a.val, count(1) FROM (SELECT explode(map(1,'one',2,'two',3,'three')) AS (key,val) FROM src LIMIT 3) a GROUP BY a.key, a.val +POSTHOOK: query: SELECT a.key, a.val, count(1) FROM (SELECT explode(map(1,'one',2,'two',3,'three')) AS (key,val) FROM src tablesample (1 rows)) a GROUP BY a.key, a.val POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### @@ -694,11 +572,11 @@ PREHOOK: type: CREATETABLE POSTHOOK: query: create table lazy_array_map (map_col map, array_col array) POSTHOOK: type: CREATETABLE POSTHOOK: Output: default@lazy_array_map -PREHOOK: query: INSERT OVERWRITE TABLE lazy_array_map select map(1,'one',2,'two',3,'three'), array('100','200','300') FROM src LIMIT 1 +PREHOOK: query: INSERT OVERWRITE TABLE lazy_array_map select map(1,'one',2,'two',3,'three'), array('100','200','300') FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@lazy_array_map -POSTHOOK: query: INSERT OVERWRITE TABLE lazy_array_map select map(1,'one',2,'two',3,'three'), array('100','200','300') FROM src LIMIT 1 +POSTHOOK: query: INSERT OVERWRITE TABLE lazy_array_map select map(1,'one',2,'two',3,'three'), array('100','200','300') FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@lazy_array_map diff --git a/ql/src/test/results/clientpositive/udf_field.q.out b/ql/src/test/results/clientpositive/udf_field.q.out index de6f817..d0e58b2 100644 --- a/ql/src/test/results/clientpositive/udf_field.q.out +++ b/ql/src/test/results/clientpositive/udf_field.q.out @@ -13,7 +13,7 @@ PREHOOK: query: SELECT field("x", "a", "b", "c", "d"), field(NULL, "a", "b", "c", "d"), field(0, 1, 2, 3, 4) -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### @@ -21,7 +21,7 @@ POSTHOOK: query: SELECT field("x", "a", "b", "c", "d"), 
field(NULL, "a", "b", "c", "d"), field(0, 1, 2, 3, 4) -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### @@ -32,7 +32,7 @@ PREHOOK: query: SELECT field("c", "a", "b", "c", "d"), field("d", "a", "b", "c", "d"), field("d", "a", "b", NULL, "d") -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### @@ -42,7 +42,7 @@ POSTHOOK: query: SELECT field("c", "a", "b", "c", "d"), field("d", "a", "b", "c", "d"), field("d", "a", "b", NULL, "d") -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### @@ -53,7 +53,7 @@ PREHOOK: query: SELECT field(3, 1, 2, 3, 4), field(4, 1, 2, 3, 4), field(4, 1, 2, NULL, 4) -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### @@ -63,7 +63,7 @@ POSTHOOK: query: SELECT field(3, 1, 2, 3, 4), field(4, 1, 2, 3, 4), field(4, 1, 2, NULL, 4) -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_find_in_set.q.out b/ql/src/test/results/clientpositive/udf_find_in_set.q.out index 01b67b6..81fe0c7 100644 --- a/ql/src/test/results/clientpositive/udf_find_in_set.q.out +++ b/ql/src/test/results/clientpositive/udf_find_in_set.q.out @@ -24,32 +24,21 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION find_in_set (. (TOK_TABLE_OR_COL src1) key) (TOK_FUNCTION concat (. (TOK_TABLE_OR_COL src1) key) ',' (. 
(TOK_TABLE_OR_COL src1) value))))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - src1 - TableScan - alias: src1 - Select Operator - expressions: - expr: find_in_set(key, concat(key, ',', value)) - type: int - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: -1 + Processor Tree: + TableScan + alias: src1 + Select Operator + expressions: + expr: find_in_set(key, concat(key, ',', value)) + type: int + outputColumnNames: _col0 + ListSink PREHOOK: query: FROM src1 SELECT find_in_set(src1.key,concat(src1.key,',',src1.value)) @@ -85,119 +74,119 @@ POSTHOOK: Input: default@src1 1 1 1 -PREHOOK: query: SELECT find_in_set('ab','ab,abc,abcde') FROM src1 LIMIT 1 +PREHOOK: query: SELECT find_in_set('ab','ab,abc,abcde') FROM src1 tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src1 #### A masked pattern was here #### -POSTHOOK: query: SELECT find_in_set('ab','ab,abc,abcde') FROM src1 LIMIT 1 +POSTHOOK: query: SELECT find_in_set('ab','ab,abc,abcde') FROM src1 tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src1 #### A masked pattern was here #### 1 -PREHOOK: query: SELECT find_in_set('ab','abc,ab,bbb') FROM src1 LIMIT 1 +PREHOOK: query: SELECT find_in_set('ab','abc,ab,bbb') FROM src1 tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src1 #### A masked pattern was here #### -POSTHOOK: query: SELECT find_in_set('ab','abc,ab,bbb') FROM src1 LIMIT 1 +POSTHOOK: query: SELECT find_in_set('ab','abc,ab,bbb') FROM src1 tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src1 #### A masked pattern was here #### 2 -PREHOOK: query: SELECT find_in_set('ab','def,abc,ab') FROM src1 LIMIT 1 +PREHOOK: query: SELECT find_in_set('ab','def,abc,ab') FROM src1 tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src1 #### A masked pattern was here #### -POSTHOOK: query: SELECT find_in_set('ab','def,abc,ab') FROM src1 LIMIT 1 +POSTHOOK: query: SELECT find_in_set('ab','def,abc,ab') FROM src1 tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src1 #### A masked pattern was here #### 3 -PREHOOK: query: SELECT find_in_set('ab','abc,abd,abf') FROM src1 LIMIT 1 +PREHOOK: query: SELECT find_in_set('ab','abc,abd,abf') FROM src1 tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src1 #### A masked pattern was here #### -POSTHOOK: query: SELECT find_in_set('ab','abc,abd,abf') FROM src1 LIMIT 1 +POSTHOOK: query: SELECT find_in_set('ab','abc,abd,abf') FROM src1 tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src1 #### A masked pattern was here #### 0 -PREHOOK: query: SELECT find_in_set(null,'a,b,c') FROM src1 LIMIT 1 +PREHOOK: query: SELECT find_in_set(null,'a,b,c') FROM src1 tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src1 #### A masked pattern was here #### -POSTHOOK: query: SELECT find_in_set(null,'a,b,c') FROM src1 LIMIT 1 +POSTHOOK: query: SELECT find_in_set(null,'a,b,c') FROM src1 tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src1 #### A masked pattern was here #### NULL -PREHOOK: query: SELECT find_in_set('a',null) FROM src1 LIMIT 1 +PREHOOK: query: SELECT find_in_set('a',null) FROM 
src1 tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src1 #### A masked pattern was here #### -POSTHOOK: query: SELECT find_in_set('a',null) FROM src1 LIMIT 1 +POSTHOOK: query: SELECT find_in_set('a',null) FROM src1 tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src1 #### A masked pattern was here #### NULL -PREHOOK: query: SELECT find_in_set('', '') FROM src1 LIMIT 1 +PREHOOK: query: SELECT find_in_set('', '') FROM src1 tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src1 #### A masked pattern was here #### -POSTHOOK: query: SELECT find_in_set('', '') FROM src1 LIMIT 1 +POSTHOOK: query: SELECT find_in_set('', '') FROM src1 tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src1 #### A masked pattern was here #### 1 -PREHOOK: query: SELECT find_in_set('',',') FROM src1 LIMIT 1 +PREHOOK: query: SELECT find_in_set('',',') FROM src1 tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src1 #### A masked pattern was here #### -POSTHOOK: query: SELECT find_in_set('',',') FROM src1 LIMIT 1 +POSTHOOK: query: SELECT find_in_set('',',') FROM src1 tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src1 #### A masked pattern was here #### 1 -PREHOOK: query: SELECT find_in_set('','a,,b') FROM src1 LIMIT 1 +PREHOOK: query: SELECT find_in_set('','a,,b') FROM src1 tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src1 #### A masked pattern was here #### -POSTHOOK: query: SELECT find_in_set('','a,,b') FROM src1 LIMIT 1 +POSTHOOK: query: SELECT find_in_set('','a,,b') FROM src1 tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src1 #### A masked pattern was here #### 2 -PREHOOK: query: SELECT find_in_set('','a,b,') FROM src1 LIMIT 1 +PREHOOK: query: SELECT find_in_set('','a,b,') FROM src1 tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src1 #### A masked pattern was here #### -POSTHOOK: query: SELECT find_in_set('','a,b,') FROM src1 LIMIT 1 +POSTHOOK: query: SELECT find_in_set('','a,b,') FROM src1 tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src1 #### A masked pattern was here #### 3 -PREHOOK: query: SELECT find_in_set(',','a,b,d,') FROM src1 LIMIT 1 +PREHOOK: query: SELECT find_in_set(',','a,b,d,') FROM src1 tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src1 #### A masked pattern was here #### -POSTHOOK: query: SELECT find_in_set(',','a,b,d,') FROM src1 LIMIT 1 +POSTHOOK: query: SELECT find_in_set(',','a,b,d,') FROM src1 tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src1 #### A masked pattern was here #### 0 -PREHOOK: query: SELECT find_in_set('a','') FROM src1 LIMIT 1 +PREHOOK: query: SELECT find_in_set('a','') FROM src1 tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src1 #### A masked pattern was here #### -POSTHOOK: query: SELECT find_in_set('a','') FROM src1 LIMIT 1 +POSTHOOK: query: SELECT find_in_set('a','') FROM src1 tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src1 #### A masked pattern was here #### 0 -PREHOOK: query: SELECT find_in_set('a,','a,b,c,d') FROM src1 LIMIT 1 +PREHOOK: query: SELECT find_in_set('a,','a,b,c,d') FROM src1 tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src1 #### A masked pattern was here #### -POSTHOOK: query: SELECT find_in_set('a,','a,b,c,d') FROM src1 LIMIT 1 +POSTHOOK: query: SELECT find_in_set('a,','a,b,c,d') FROM src1 tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src1 
#### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_format_number.q.out b/ql/src/test/results/clientpositive/udf_format_number.q.out index ad0ca4c..c214e34 100644 --- a/ql/src/test/results/clientpositive/udf_format_number.q.out +++ b/ql/src/test/results/clientpositive/udf_format_number.q.out @@ -22,61 +22,50 @@ Example: PREHOOK: query: EXPLAIN SELECT format_number(12332.123456, 4), format_number(12332.1,4), - format_number(12332.2,0) FROM src limit 1 + format_number(12332.2,0) FROM src tablesample (1 rows) PREHOOK: type: QUERY POSTHOOK: query: EXPLAIN SELECT format_number(12332.123456, 4), format_number(12332.1,4), - format_number(12332.2,0) FROM src limit 1 + format_number(12332.2,0) FROM src tablesample (1 rows) POSTHOOK: type: QUERY ABSTRACT SYNTAX TREE: - (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION format_number 12332.123456 4)) (TOK_SELEXPR (TOK_FUNCTION format_number 12332.1 4)) (TOK_SELEXPR (TOK_FUNCTION format_number 12332.2 0))) (TOK_LIMIT 1))) + (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src) (TOK_TABLESPLITSAMPLE TOK_ROWCOUNT 1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION format_number 12332.123456 4)) (TOK_SELEXPR (TOK_FUNCTION format_number 12332.1 4)) (TOK_SELEXPR (TOK_FUNCTION format_number 12332.2 0))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - src - TableScan - alias: src - Select Operator - expressions: - expr: format_number(12332.123456, 4) - type: string - expr: format_number(12332.1, 4) - type: string - expr: format_number(12332.2, 0) - type: string - outputColumnNames: _col0, _col1, _col2 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator - limit: 1 + limit: -1 + Processor Tree: + TableScan + alias: src + Row Limit Per Split: 1 + Select Operator + expressions: + expr: format_number(12332.123456, 4) + type: string + expr: format_number(12332.1, 4) + type: string + expr: format_number(12332.2, 0) + type: string + outputColumnNames: _col0, _col1, _col2 + ListSink PREHOOK: query: SELECT format_number(12332.123456, 4), format_number(12332.1,4), format_number(12332.2,0) -FROM src limit 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### POSTHOOK: query: SELECT format_number(12332.123456, 4), format_number(12332.1,4), format_number(12332.2,0) -FROM src limit 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### @@ -86,7 +75,7 @@ SELECT format_number(0.123456789, 12), format_number(12345678.123456789, 5), format_number(1234567.123456789, 7), format_number(123456.123456789, 0) -FROM src limit 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### @@ -95,7 +84,7 @@ SELECT format_number(0.123456789, 12), format_number(12345678.123456789, 5), format_number(1234567.123456789, 7), format_number(123456.123456789, 0) -FROM src limit 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### @@ 
-105,7 +94,7 @@ SELECT format_number(-123456.123456789, 0), format_number(-1234567.123456789, 2), format_number(-0.123456789, 15), format_number(-12345.123456789, 4) -FROM src limit 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### @@ -114,7 +103,7 @@ SELECT format_number(-123456.123456789, 0), format_number(-1234567.123456789, 2), format_number(-0.123456789, 15), format_number(-12345.123456789, 4) -FROM src limit 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### @@ -125,7 +114,7 @@ SELECT format_number(0.0, 4), format_number(000.0000, 1), format_number(00000.0000, 1), format_number(-00.0, 4) -FROM src limit 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### @@ -135,7 +124,7 @@ SELECT format_number(0.0, 4), format_number(000.0000, 1), format_number(00000.0000, 1), format_number(-00.0, 4) -FROM src limit 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### @@ -146,7 +135,7 @@ SELECT format_number(0, 0), format_number(12, 2), format_number(123, 5), format_number(1234, 7) -FROM src limit 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### @@ -156,7 +145,7 @@ SELECT format_number(0, 0), format_number(12, 2), format_number(123, 5), format_number(1234, 7) -FROM src limit 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### @@ -171,7 +160,7 @@ SELECT format_number(-9223372036854775807, 10), format_number(9223372036854775807, 20), format_number(4.9E-324, 324), format_number(1.7976931348623157E308, 308) -FROM src limit 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### @@ -185,7 +174,7 @@ SELECT format_number(-9223372036854775807, 10), format_number(9223372036854775807, 20), format_number(4.9E-324, 324), format_number(1.7976931348623157E308, 308) -FROM src limit 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_get_json_object.q.out b/ql/src/test/results/clientpositive/udf_get_json_object.q.out index a7b37f1..bc01d2e 100644 --- a/ql/src/test/results/clientpositive/udf_get_json_object.q.out +++ b/ql/src/test/results/clientpositive/udf_get_json_object.q.out @@ -48,32 +48,21 @@ ABSTRACT SYNTAX TREE: #### A masked pattern was here #### STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - src_json - TableScan - alias: src_json - Select Operator - expressions: -#### A masked pattern was here #### - type: string - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: -1 + Processor Tree: + TableScan + alias: src_json + Select Operator + expressions: +#### A masked pattern was here #### + type: string + outputColumnNames: _col0 + ListSink PREHOOK: query: SELECT get_json_object(src_json.json, '$') FROM src_json @@ -184,11 +173,11 @@ CREATE TABLE dest2(c1 STRING) 
STORED AS RCFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: default@dest2 POSTHOOK: Lineage: dest1.c1 SIMPLE [] -PREHOOK: query: INSERT OVERWRITE TABLE dest2 SELECT '{"a":"b\nc"}' FROM src LIMIT 1 +PREHOOK: query: INSERT OVERWRITE TABLE dest2 SELECT '{"a":"b\nc"}' FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@dest2 -POSTHOOK: query: INSERT OVERWRITE TABLE dest2 SELECT '{"a":"b\nc"}' FROM src LIMIT 1 +POSTHOOK: query: INSERT OVERWRITE TABLE dest2 SELECT '{"a":"b\nc"}' FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@dest2 diff --git a/ql/src/test/results/clientpositive/udf_greaterthan.q.out b/ql/src/test/results/clientpositive/udf_greaterthan.q.out index cbeaf95..bf31f31 100644 --- a/ql/src/test/results/clientpositive/udf_greaterthan.q.out +++ b/ql/src/test/results/clientpositive/udf_greaterthan.q.out @@ -8,11 +8,11 @@ PREHOOK: type: DESCFUNCTION POSTHOOK: query: DESCRIBE FUNCTION EXTENDED > POSTHOOK: type: DESCFUNCTION a > b - Returns TRUE if a is greater than b -PREHOOK: query: SELECT true>false, false>true, false>false, true>true FROM src LIMIT 1 +PREHOOK: query: SELECT true>false, false>true, false>false, true>true FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT true>false, false>true, false>false, true>true FROM src LIMIT 1 +POSTHOOK: query: SELECT true>false, false>true, false>false, true>true FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_greaterthanorequal.q.out b/ql/src/test/results/clientpositive/udf_greaterthanorequal.q.out index c97d557..86333cd 100644 --- a/ql/src/test/results/clientpositive/udf_greaterthanorequal.q.out +++ b/ql/src/test/results/clientpositive/udf_greaterthanorequal.q.out @@ -8,11 +8,11 @@ PREHOOK: type: DESCFUNCTION POSTHOOK: query: DESCRIBE FUNCTION EXTENDED >= POSTHOOK: type: DESCFUNCTION a >= b - Returns TRUE if a is not smaller than b -PREHOOK: query: SELECT true>=false, false>=true, false>=false, true>=true FROM src LIMIT 1 +PREHOOK: query: SELECT true>=false, false>=true, false>=false, true>=true FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT true>=false, false>=true, false>=false, true>=true FROM src LIMIT 1 +POSTHOOK: query: SELECT true>=false, false>=true, false>=false, true>=true FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_hash.q.out b/ql/src/test/results/clientpositive/udf_hash.q.out index bd25af5..1509efc 100644 --- a/ql/src/test/results/clientpositive/udf_hash.q.out +++ b/ql/src/test/results/clientpositive/udf_hash.q.out @@ -14,7 +14,7 @@ SELECT hash(CAST(1 AS TINYINT)), hash(CAST(2 AS SMALLINT)), hash(CAST(1.25 AS FLOAT)), hash(CAST(16.0 AS DOUBLE)), hash('400'), hash('abc'), hash(TRUE), hash(FALSE), hash(1, 2, 3) -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY POSTHOOK: query: EXPLAIN SELECT hash(CAST(1 AS TINYINT)), hash(CAST(2 AS SMALLINT)), @@ -22,59 +22,48 @@ SELECT hash(CAST(1 AS TINYINT)), hash(CAST(2 AS SMALLINT)), hash(CAST(1.25 AS FLOAT)), hash(CAST(16.0 AS DOUBLE)), hash('400'), hash('abc'), hash(TRUE), hash(FALSE), hash(1, 2, 3) -FROM src LIMIT 1 +FROM src tablesample (1 
rows) POSTHOOK: type: QUERY ABSTRACT SYNTAX TREE: - (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION hash (TOK_FUNCTION TOK_TINYINT 1))) (TOK_SELEXPR (TOK_FUNCTION hash (TOK_FUNCTION TOK_SMALLINT 2))) (TOK_SELEXPR (TOK_FUNCTION hash 3)) (TOK_SELEXPR (TOK_FUNCTION hash (TOK_FUNCTION TOK_BIGINT '123456789012'))) (TOK_SELEXPR (TOK_FUNCTION hash (TOK_FUNCTION TOK_FLOAT 1.25))) (TOK_SELEXPR (TOK_FUNCTION hash (TOK_FUNCTION TOK_DOUBLE 16.0))) (TOK_SELEXPR (TOK_FUNCTION hash '400')) (TOK_SELEXPR (TOK_FUNCTION hash 'abc')) (TOK_SELEXPR (TOK_FUNCTION hash TRUE)) (TOK_SELEXPR (TOK_FUNCTION hash FALSE)) (TOK_SELEXPR (TOK_FUNCTION hash 1 2 3))) (TOK_LIMIT 1))) + (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src) (TOK_TABLESPLITSAMPLE TOK_ROWCOUNT 1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION hash (TOK_FUNCTION TOK_TINYINT 1))) (TOK_SELEXPR (TOK_FUNCTION hash (TOK_FUNCTION TOK_SMALLINT 2))) (TOK_SELEXPR (TOK_FUNCTION hash 3)) (TOK_SELEXPR (TOK_FUNCTION hash (TOK_FUNCTION TOK_BIGINT '123456789012'))) (TOK_SELEXPR (TOK_FUNCTION hash (TOK_FUNCTION TOK_FLOAT 1.25))) (TOK_SELEXPR (TOK_FUNCTION hash (TOK_FUNCTION TOK_DOUBLE 16.0))) (TOK_SELEXPR (TOK_FUNCTION hash '400')) (TOK_SELEXPR (TOK_FUNCTION hash 'abc')) (TOK_SELEXPR (TOK_FUNCTION hash TRUE)) (TOK_SELEXPR (TOK_FUNCTION hash FALSE)) (TOK_SELEXPR (TOK_FUNCTION hash 1 2 3))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - src - TableScan - alias: src - Select Operator - expressions: - expr: hash(UDFToByte(1)) - type: int - expr: hash(UDFToShort(2)) - type: int - expr: hash(3) - type: int - expr: hash(UDFToLong('123456789012')) - type: int - expr: hash(UDFToFloat(1.25)) - type: int - expr: hash(16.0) - type: int - expr: hash('400') - type: int - expr: hash('abc') - type: int - expr: hash(true) - type: int - expr: hash(false) - type: int - expr: hash(1,2,3) - type: int - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator - limit: 1 + limit: -1 + Processor Tree: + TableScan + alias: src + Row Limit Per Split: 1 + Select Operator + expressions: + expr: hash(UDFToByte(1)) + type: int + expr: hash(UDFToShort(2)) + type: int + expr: hash(3) + type: int + expr: hash(UDFToLong('123456789012')) + type: int + expr: hash(UDFToFloat(1.25)) + type: int + expr: hash(16.0) + type: int + expr: hash('400') + type: int + expr: hash('abc') + type: int + expr: hash(true) + type: int + expr: hash(false) + type: int + expr: hash(1,2,3) + type: int + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10 + ListSink PREHOOK: query: SELECT hash(CAST(1 AS TINYINT)), hash(CAST(2 AS SMALLINT)), @@ -82,7 +71,7 @@ PREHOOK: query: SELECT hash(CAST(1 AS TINYINT)), hash(CAST(2 AS SMALLINT)), hash(CAST(1.25 AS FLOAT)), hash(CAST(16.0 AS DOUBLE)), hash('400'), hash('abc'), hash(TRUE), hash(FALSE), hash(1, 2, 3) -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### @@ -91,7 +80,7 @@ 
POSTHOOK: query: SELECT hash(CAST(1 AS TINYINT)), hash(CAST(2 AS SMALLINT)), hash(CAST(1.25 AS FLOAT)), hash(CAST(16.0 AS DOUBLE)), hash('400'), hash('abc'), hash(TRUE), hash(FALSE), hash(1, 2, 3) -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_hex.q.out b/ql/src/test/results/clientpositive/udf_hex.q.out index 78263f3..540b361 100644 --- a/ql/src/test/results/clientpositive/udf_hex.q.out +++ b/ql/src/test/results/clientpositive/udf_hex.q.out @@ -21,7 +21,7 @@ SELECT hex('Facebook'), hex('\0'), hex('qwertyuiopasdfghjkl') -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### @@ -31,7 +31,7 @@ SELECT hex('Facebook'), hex('\0'), hex('qwertyuiopasdfghjkl') -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### @@ -41,7 +41,7 @@ SELECT hex(1), hex(0), hex(4207849477) -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### @@ -50,18 +50,18 @@ SELECT hex(1), hex(0), hex(4207849477) -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### 1 0 FACEB005 PREHOOK: query: -- Negative numbers should be treated as two's complement (64 bit). -SELECT hex(-5) FROM src LIMIT 1 +SELECT hex(-5) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### POSTHOOK: query: -- Negative numbers should be treated as two's complement (64 bit). -SELECT hex(-5) FROM src LIMIT 1 +SELECT hex(-5) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_hour.q.out b/ql/src/test/results/clientpositive/udf_hour.q.out index 2a6c722..6583a8b 100644 --- a/ql/src/test/results/clientpositive/udf_hour.q.out +++ b/ql/src/test/results/clientpositive/udf_hour.q.out @@ -26,40 +26,29 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION hour '2009-08-07 13:14:15')) (TOK_SELEXPR (TOK_FUNCTION hour '13:14:15')) (TOK_SELEXPR (TOK_FUNCTION hour '2009-08-07'))) (TOK_WHERE (= (TOK_TABLE_OR_COL key) 86)))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - src - TableScan - alias: src - Filter Operator - predicate: - expr: (key = 86) - type: boolean - Select Operator - expressions: - expr: hour('2009-08-07 13:14:15') - type: int - expr: hour('13:14:15') - type: int - expr: hour('2009-08-07') - type: int - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: -1 + Processor Tree: + TableScan + alias: src + Filter Operator + predicate: + expr: (key = 86) + type: boolean + Select Operator + expressions: + expr: hour('2009-08-07 13:14:15') + type: int + expr: hour('13:14:15') + type: int + expr: hour('2009-08-07') + type: int + outputColumnNames: 
_col0, _col1, _col2 + ListSink PREHOOK: query: SELECT hour('2009-08-07 13:14:15'), hour('13:14:15'), hour('2009-08-07') diff --git a/ql/src/test/results/clientpositive/udf_if.q.out b/ql/src/test/results/clientpositive/udf_if.q.out index 35e0b50..e7de5d1 100644 --- a/ql/src/test/results/clientpositive/udf_if.q.out +++ b/ql/src/test/results/clientpositive/udf_if.q.out @@ -15,7 +15,7 @@ SELECT IF(TRUE, 1, 2) AS COL1, IF(2=2, 1, NULL) AS COL4, IF(2=2, NULL, 1) AS COL5, IF(IF(TRUE, NULL, FALSE), 1, 2) AS COL6 -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY POSTHOOK: query: EXPLAIN SELECT IF(TRUE, 1, 2) AS COL1, @@ -24,49 +24,38 @@ SELECT IF(TRUE, 1, 2) AS COL1, IF(2=2, 1, NULL) AS COL4, IF(2=2, NULL, 1) AS COL5, IF(IF(TRUE, NULL, FALSE), 1, 2) AS COL6 -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY ABSTRACT SYNTAX TREE: - (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION IF TRUE 1 2) COL1) (TOK_SELEXPR (TOK_FUNCTION IF FALSE (TOK_FUNCTION TOK_STRING TOK_NULL) (TOK_FUNCTION TOK_STRING 1)) COL2) (TOK_SELEXPR (TOK_FUNCTION IF (= 1 1) (TOK_FUNCTION IF (= 2 2) 1 2) (TOK_FUNCTION IF (= 3 3) 3 4)) COL3) (TOK_SELEXPR (TOK_FUNCTION IF (= 2 2) 1 TOK_NULL) COL4) (TOK_SELEXPR (TOK_FUNCTION IF (= 2 2) TOK_NULL 1) COL5) (TOK_SELEXPR (TOK_FUNCTION IF (TOK_FUNCTION IF TRUE TOK_NULL FALSE) 1 2) COL6)) (TOK_LIMIT 1))) + (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src) (TOK_TABLESPLITSAMPLE TOK_ROWCOUNT 1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION IF TRUE 1 2) COL1) (TOK_SELEXPR (TOK_FUNCTION IF FALSE (TOK_FUNCTION TOK_STRING TOK_NULL) (TOK_FUNCTION TOK_STRING 1)) COL2) (TOK_SELEXPR (TOK_FUNCTION IF (= 1 1) (TOK_FUNCTION IF (= 2 2) 1 2) (TOK_FUNCTION IF (= 3 3) 3 4)) COL3) (TOK_SELEXPR (TOK_FUNCTION IF (= 2 2) 1 TOK_NULL) COL4) (TOK_SELEXPR (TOK_FUNCTION IF (= 2 2) TOK_NULL 1) COL5) (TOK_SELEXPR (TOK_FUNCTION IF (TOK_FUNCTION IF TRUE TOK_NULL FALSE) 1 2) COL6)))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - src - TableScan - alias: src - Select Operator - expressions: - expr: if(true, 1, 2) - type: int - expr: if(false, UDFToString(null), UDFToString(1)) - type: string - expr: if((1 = 1), if((2 = 2), 1, 2), if((3 = 3), 3, 4)) - type: int - expr: if((2 = 2), 1, null) - type: int - expr: if((2 = 2), null, 1) - type: int - expr: if(if(true, null, false), 1, 2) - type: int - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator - limit: 1 + limit: -1 + Processor Tree: + TableScan + alias: src + Row Limit Per Split: 1 + Select Operator + expressions: + expr: if(true, 1, 2) + type: int + expr: if(false, UDFToString(null), UDFToString(1)) + type: string + expr: if((1 = 1), if((2 = 2), 1, 2), if((3 = 3), 3, 4)) + type: int + expr: if((2 = 2), 1, null) + type: int + expr: if((2 = 2), null, 1) + type: int + expr: if(if(true, null, false), 1, 2) + type: int + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + ListSink PREHOOK: query: SELECT IF(TRUE, 1, 2) AS COL1, @@ -75,7 +64,7 @@ PREHOOK: query: SELECT IF(TRUE, 1, 2) 
AS COL1, IF(2=2, 1, NULL) AS COL4, IF(2=2, NULL, 1) AS COL5, IF(IF(TRUE, NULL, FALSE), 1, 2) AS COL6 -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### @@ -85,7 +74,7 @@ POSTHOOK: query: SELECT IF(TRUE, 1, 2) AS COL1, IF(2=2, 1, NULL) AS COL4, IF(2=2, NULL, 1) AS COL5, IF(IF(TRUE, NULL, FALSE), 1, 2) AS COL6 -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### @@ -96,7 +85,7 @@ SELECT IF(TRUE, CAST(128 AS SMALLINT), CAST(1 AS TINYINT)) AS COL1, IF(FALSE, 1, 1.1) AS COL2, IF(FALSE, 1, 'ABC') AS COL3, IF(FALSE, 'ABC', 12.3) AS COL4 -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY POSTHOOK: query: -- Type conversions EXPLAIN @@ -104,52 +93,41 @@ SELECT IF(TRUE, CAST(128 AS SMALLINT), CAST(1 AS TINYINT)) AS COL1, IF(FALSE, 1, 1.1) AS COL2, IF(FALSE, 1, 'ABC') AS COL3, IF(FALSE, 'ABC', 12.3) AS COL4 -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY ABSTRACT SYNTAX TREE: - (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION IF TRUE (TOK_FUNCTION TOK_SMALLINT 128) (TOK_FUNCTION TOK_TINYINT 1)) COL1) (TOK_SELEXPR (TOK_FUNCTION IF FALSE 1 1.1) COL2) (TOK_SELEXPR (TOK_FUNCTION IF FALSE 1 'ABC') COL3) (TOK_SELEXPR (TOK_FUNCTION IF FALSE 'ABC' 12.3) COL4)) (TOK_LIMIT 1))) + (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src) (TOK_TABLESPLITSAMPLE TOK_ROWCOUNT 1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION IF TRUE (TOK_FUNCTION TOK_SMALLINT 128) (TOK_FUNCTION TOK_TINYINT 1)) COL1) (TOK_SELEXPR (TOK_FUNCTION IF FALSE 1 1.1) COL2) (TOK_SELEXPR (TOK_FUNCTION IF FALSE 1 'ABC') COL3) (TOK_SELEXPR (TOK_FUNCTION IF FALSE 'ABC' 12.3) COL4)))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - src - TableScan - alias: src - Select Operator - expressions: - expr: if(true, UDFToShort(128), UDFToByte(1)) - type: smallint - expr: if(false, 1, 1.1) - type: double - expr: if(false, 1, 'ABC') - type: string - expr: if(false, 'ABC', 12.3) - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator - limit: 1 + limit: -1 + Processor Tree: + TableScan + alias: src + Row Limit Per Split: 1 + Select Operator + expressions: + expr: if(true, UDFToShort(128), UDFToByte(1)) + type: smallint + expr: if(false, 1, 1.1) + type: double + expr: if(false, 1, 'ABC') + type: string + expr: if(false, 'ABC', 12.3) + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + ListSink PREHOOK: query: SELECT IF(TRUE, CAST(128 AS SMALLINT), CAST(1 AS TINYINT)) AS COL1, IF(FALSE, 1, 1.1) AS COL2, IF(FALSE, 1, 'ABC') AS COL3, IF(FALSE, 'ABC', 12.3) AS COL4 -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### @@ -157,7 +135,7 @@ POSTHOOK: query: SELECT IF(TRUE, CAST(128 AS SMALLINT), CAST(1 AS TINYINT)) AS C IF(FALSE, 1, 1.1) AS COL2, IF(FALSE, 1, 'ABC') AS COL3, IF(FALSE, 'ABC', 12.3) AS COL4 -FROM src LIMIT 1 +FROM src 
tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_in.q.out b/ql/src/test/results/clientpositive/udf_in.q.out index 84d75fc..da07f61 100644 --- a/ql/src/test/results/clientpositive/udf_in.q.out +++ b/ql/src/test/results/clientpositive/udf_in.q.out @@ -8,7 +8,7 @@ PREHOOK: query: SELECT 1 IN (1, 2, 3), 1 IN (1, 2, 3) OR false IN(false), NULL IN (1, 2, 3), 4 IN (1, 2, 3, NULL), - (1+3) IN (5, 6, (1+2) + 1) FROM src LIMIT 1 + (1+3) IN (5, 6, (1+2) + 1) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### @@ -22,7 +22,7 @@ POSTHOOK: query: SELECT 1 IN (1, 2, 3), 1 IN (1, 2, 3) OR false IN(false), NULL IN (1, 2, 3), 4 IN (1, 2, 3, NULL), - (1+3) IN (5, 6, (1+2) + 1) FROM src LIMIT 1 + (1+3) IN (5, 6, (1+2) + 1) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_instr.q.out b/ql/src/test/results/clientpositive/udf_instr.q.out index dd618e8..ef89128 100644 --- a/ql/src/test/results/clientpositive/udf_instr.q.out +++ b/ql/src/test/results/clientpositive/udf_instr.q.out @@ -25,7 +25,7 @@ SELECT instr('abcd', 'abc'), instr(CAST(16.0 AS DOUBLE), '.0'), instr(null, 'abc'), instr('abcd', null) -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY POSTHOOK: query: EXPLAIN SELECT instr('abcd', 'abc'), @@ -41,63 +41,52 @@ SELECT instr('abcd', 'abc'), instr(CAST(16.0 AS DOUBLE), '.0'), instr(null, 'abc'), instr('abcd', null) -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY ABSTRACT SYNTAX TREE: - (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION instr 'abcd' 'abc')) (TOK_SELEXPR (TOK_FUNCTION instr 'abcabc' 'ccc')) (TOK_SELEXPR (TOK_FUNCTION instr 123 '23')) (TOK_SELEXPR (TOK_FUNCTION instr 123 23)) (TOK_SELEXPR (TOK_FUNCTION instr TRUE 1)) (TOK_SELEXPR (TOK_FUNCTION instr FALSE 1)) (TOK_SELEXPR (TOK_FUNCTION instr '12345' (TOK_FUNCTION TOK_TINYINT '2'))) (TOK_SELEXPR (TOK_FUNCTION instr (TOK_FUNCTION TOK_SMALLINT '12345') '34')) (TOK_SELEXPR (TOK_FUNCTION instr (TOK_FUNCTION TOK_BIGINT '123456789012') '456')) (TOK_SELEXPR (TOK_FUNCTION instr (TOK_FUNCTION TOK_FLOAT 1.25) '.25')) (TOK_SELEXPR (TOK_FUNCTION instr (TOK_FUNCTION TOK_DOUBLE 16.0) '.0')) (TOK_SELEXPR (TOK_FUNCTION instr TOK_NULL 'abc')) (TOK_SELEXPR (TOK_FUNCTION instr 'abcd' TOK_NULL))) (TOK_LIMIT 1))) + (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src) (TOK_TABLESPLITSAMPLE TOK_ROWCOUNT 1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION instr 'abcd' 'abc')) (TOK_SELEXPR (TOK_FUNCTION instr 'abcabc' 'ccc')) (TOK_SELEXPR (TOK_FUNCTION instr 123 '23')) (TOK_SELEXPR (TOK_FUNCTION instr 123 23)) (TOK_SELEXPR (TOK_FUNCTION instr TRUE 1)) (TOK_SELEXPR (TOK_FUNCTION instr FALSE 1)) (TOK_SELEXPR (TOK_FUNCTION instr '12345' (TOK_FUNCTION TOK_TINYINT '2'))) (TOK_SELEXPR (TOK_FUNCTION instr (TOK_FUNCTION TOK_SMALLINT '12345') '34')) (TOK_SELEXPR (TOK_FUNCTION instr (TOK_FUNCTION TOK_BIGINT '123456789012') '456')) (TOK_SELEXPR (TOK_FUNCTION instr (TOK_FUNCTION TOK_FLOAT 1.25) '.25')) (TOK_SELEXPR (TOK_FUNCTION instr (TOK_FUNCTION TOK_DOUBLE 16.0) '.0')) (TOK_SELEXPR (TOK_FUNCTION instr TOK_NULL 'abc')) (TOK_SELEXPR (TOK_FUNCTION instr 'abcd' TOK_NULL))))) STAGE DEPENDENCIES: - Stage-1 is a root 
stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - src - TableScan - alias: src - Select Operator - expressions: - expr: instr('abcd''abc') - type: int - expr: instr('abcabc''ccc') - type: int - expr: instr(123'23') - type: int - expr: instr(12323) - type: int - expr: instr(true1) - type: int - expr: instr(false1) - type: int - expr: instr('12345'UDFToByte('2')) - type: int - expr: instr(UDFToShort('12345')'34') - type: int - expr: instr(UDFToLong('123456789012')'456') - type: int - expr: instr(UDFToFloat(1.25)'.25') - type: int - expr: instr(16.0'.0') - type: int - expr: instr(null'abc') - type: int - expr: instr('abcd'null) - type: int - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator - limit: 1 + limit: -1 + Processor Tree: + TableScan + alias: src + Row Limit Per Split: 1 + Select Operator + expressions: + expr: instr('abcd''abc') + type: int + expr: instr('abcabc''ccc') + type: int + expr: instr(123'23') + type: int + expr: instr(12323) + type: int + expr: instr(true1) + type: int + expr: instr(false1) + type: int + expr: instr('12345'UDFToByte('2')) + type: int + expr: instr(UDFToShort('12345')'34') + type: int + expr: instr(UDFToLong('123456789012')'456') + type: int + expr: instr(UDFToFloat(1.25)'.25') + type: int + expr: instr(16.0'.0') + type: int + expr: instr(null'abc') + type: int + expr: instr('abcd'null) + type: int + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12 + ListSink PREHOOK: query: SELECT instr('abcd', 'abc'), @@ -113,7 +102,7 @@ PREHOOK: query: SELECT instr('abcd', 'abc'), instr(CAST(16.0 AS DOUBLE), '.0'), instr(null, 'abc'), instr('abcd', null) -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### @@ -130,7 +119,7 @@ POSTHOOK: query: SELECT instr('abcd', 'abc'), instr(CAST(16.0 AS DOUBLE), '.0'), instr(null, 'abc'), instr('abcd', null) -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_isnull_isnotnull.q.out b/ql/src/test/results/clientpositive/udf_isnull_isnotnull.q.out index ed0757d..9871778 100644 --- a/ql/src/test/results/clientpositive/udf_isnull_isnotnull.q.out +++ b/ql/src/test/results/clientpositive/udf_isnull_isnotnull.q.out @@ -36,41 +36,30 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION TOK_ISNULL TOK_NULL)) (TOK_SELEXPR (TOK_FUNCTION TOK_ISNOTNULL 1)) (TOK_SELEXPR (TOK_FUNCTION TOK_ISNOTNULL 'my string'))) (TOK_WHERE (TOK_FUNCTION TOK_ISNOTNULL true)) (TOK_LIMIT 1))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - src - TableScan - alias: src - Filter Operator - predicate: - expr: true is not null - type: boolean - Select Operator - expressions: - expr: null is null - type: boolean - expr: 1 is not null - type: boolean - expr: 'my string' is not 
null - type: boolean - outputColumnNames: _col0, _col1, _col2 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: 1 + Processor Tree: + TableScan + alias: src + Filter Operator + predicate: + expr: true is not null + type: boolean + Select Operator + expressions: + expr: null is null + type: boolean + expr: 1 is not null + type: boolean + expr: 'my string' is not null + type: boolean + outputColumnNames: _col0, _col1, _col2 + Limit + ListSink PREHOOK: query: SELECT NULL IS NULL, @@ -110,41 +99,30 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src_thrift))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION TOK_ISNOTNULL (. (TOK_TABLE_OR_COL src_thrift) lint))) (TOK_SELEXPR (TOK_FUNCTION TOK_ISNOTNULL (. (TOK_TABLE_OR_COL src_thrift) lintstring))) (TOK_SELEXPR (TOK_FUNCTION TOK_ISNOTNULL (. (TOK_TABLE_OR_COL src_thrift) mstringstring)))) (TOK_WHERE (AND (TOK_FUNCTION TOK_ISNOTNULL (. (TOK_TABLE_OR_COL src_thrift) lint)) (NOT (TOK_FUNCTION TOK_ISNULL (. (TOK_TABLE_OR_COL src_thrift) mstringstring))))) (TOK_LIMIT 1))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - src_thrift - TableScan - alias: src_thrift - Filter Operator - predicate: - expr: (lint is not null and (not mstringstring is null)) - type: boolean - Select Operator - expressions: - expr: lint is not null - type: boolean - expr: lintstring is not null - type: boolean - expr: mstringstring is not null - type: boolean - outputColumnNames: _col0, _col1, _col2 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: 1 + Processor Tree: + TableScan + alias: src_thrift + Filter Operator + predicate: + expr: (lint is not null and (not mstringstring is null)) + type: boolean + Select Operator + expressions: + expr: lint is not null + type: boolean + expr: lintstring is not null + type: boolean + expr: mstringstring is not null + type: boolean + outputColumnNames: _col0, _col1, _col2 + Limit + ListSink PREHOOK: query: FROM src_thrift diff --git a/ql/src/test/results/clientpositive/udf_java_method.q.out b/ql/src/test/results/clientpositive/udf_java_method.q.out index 45f112f..022096b 100644 --- a/ql/src/test/results/clientpositive/udf_java_method.q.out +++ b/ql/src/test/results/clientpositive/udf_java_method.q.out @@ -21,7 +21,7 @@ SELECT java_method("java.lang.String", "valueOf", 1), java_method("java.lang.Math", "round", 2.5), java_method("java.lang.Math", "exp", 1.0), java_method("java.lang.Math", "floor", 1.9) -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY POSTHOOK: query: -- java_method() is a synonym for reflect() @@ -33,112 +33,41 @@ SELECT java_method("java.lang.String", "valueOf", 1), java_method("java.lang.Math", "round", 2.5), java_method("java.lang.Math", "exp", 1.0), java_method("java.lang.Math", "floor", 1.9) -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY ABSTRACT SYNTAX TREE: - (TOK_QUERY (TOK_FROM (TOK_TABREF 
(TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION java_method "java.lang.String" "valueOf" 1)) (TOK_SELEXPR (TOK_FUNCTION java_method "java.lang.String" "isEmpty")) (TOK_SELEXPR (TOK_FUNCTION java_method "java.lang.Math" "max" 2 3)) (TOK_SELEXPR (TOK_FUNCTION java_method "java.lang.Math" "min" 2 3)) (TOK_SELEXPR (TOK_FUNCTION java_method "java.lang.Math" "round" 2.5)) (TOK_SELEXPR (TOK_FUNCTION java_method "java.lang.Math" "exp" 1.0)) (TOK_SELEXPR (TOK_FUNCTION java_method "java.lang.Math" "floor" 1.9))) (TOK_LIMIT 1))) + (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src) (TOK_TABLESPLITSAMPLE TOK_ROWCOUNT 1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION java_method "java.lang.String" "valueOf" 1)) (TOK_SELEXPR (TOK_FUNCTION java_method "java.lang.String" "isEmpty")) (TOK_SELEXPR (TOK_FUNCTION java_method "java.lang.Math" "max" 2 3)) (TOK_SELEXPR (TOK_FUNCTION java_method "java.lang.Math" "min" 2 3)) (TOK_SELEXPR (TOK_FUNCTION java_method "java.lang.Math" "round" 2.5)) (TOK_SELEXPR (TOK_FUNCTION java_method "java.lang.Math" "exp" 1.0)) (TOK_SELEXPR (TOK_FUNCTION java_method "java.lang.Math" "floor" 1.9))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - src - TableScan - alias: src - GatherStats: false - Select Operator - expressions: - expr: reflect('java.lang.String','valueOf',1) - type: string - expr: reflect('java.lang.String','isEmpty') - type: string - expr: reflect('java.lang.Math','max',2,3) - type: string - expr: reflect('java.lang.Math','min',2,3) - type: string - expr: reflect('java.lang.Math','round',2.5) - type: string - expr: reflect('java.lang.Math','exp',1.0) - type: string - expr: reflect('java.lang.Math','floor',1.9) - type: string - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - columns _col0,_col1,_col2,_col3,_col4,_col5,_col6 - columns.types string:string:string:string:string:string:string - escape.delim \ - hive.serialization.extend.nesting.levels true - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - TotalFiles: 1 - GatherStats: false - MultiFileSpray: false - Path -> Alias: -#### A masked pattern was here #### - Path -> Partition: -#### A masked pattern was here #### - Partition - base file name: src - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value - columns.types string:string -#### A masked pattern was here #### - name default.src - numFiles 1 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct src { string key, string value} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - - input format: org.apache.hadoop.mapred.TextInputFormat - output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value - columns.types string:string -#### A masked pattern was here #### - name default.src - numFiles 1 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct src { string key, string value} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.src - name: default.src - Truncated Path -> Alias: - /src [src] - Stage: Stage-0 Fetch Operator - limit: 1 + limit: -1 + Processor Tree: + TableScan + alias: src + Row Limit Per Split: 1 + GatherStats: false + Select Operator + expressions: + expr: reflect('java.lang.String','valueOf',1) + type: string + expr: reflect('java.lang.String','isEmpty') + type: string + expr: reflect('java.lang.Math','max',2,3) + type: string + expr: reflect('java.lang.Math','min',2,3) + type: string + expr: reflect('java.lang.Math','round',2.5) + type: string + expr: reflect('java.lang.Math','exp',1.0) + type: string + expr: reflect('java.lang.Math','floor',1.9) + type: string + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6 + ListSink PREHOOK: query: SELECT java_method("java.lang.String", "valueOf", 1), @@ -148,7 +77,7 @@ PREHOOK: query: SELECT java_method("java.lang.String", "valueOf", 1), java_method("java.lang.Math", "round", 2.5), java_method("java.lang.Math", "exp", 1.0), java_method("java.lang.Math", "floor", 1.9) -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### @@ -159,7 +88,7 @@ POSTHOOK: query: SELECT java_method("java.lang.String", "valueOf", 1), java_method("java.lang.Math", "round", 2.5), java_method("java.lang.Math", "exp", 1.0), java_method("java.lang.Math", "floor", 1.9) -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_length.q.out b/ql/src/test/results/clientpositive/udf_length.q.out index 691a1a8..01ede25 100644 --- a/ql/src/test/results/clientpositive/udf_length.q.out +++ b/ql/src/test/results/clientpositive/udf_length.q.out @@ -187,32 +187,21 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME dest1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION length (. 
(TOK_TABLE_OR_COL dest1) name)))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - dest1 - TableScan - alias: dest1 - Select Operator - expressions: - expr: length(name) - type: int - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: -1 + Processor Tree: + TableScan + alias: dest1 + Select Operator + expressions: + expr: length(name) + type: int + outputColumnNames: _col0 + ListSink PREHOOK: query: SELECT length(dest1.name) FROM dest1 diff --git a/ql/src/test/results/clientpositive/udf_lessthan.q.out b/ql/src/test/results/clientpositive/udf_lessthan.q.out index 1ef5c3c..630d502 100644 --- a/ql/src/test/results/clientpositive/udf_lessthan.q.out +++ b/ql/src/test/results/clientpositive/udf_lessthan.q.out @@ -8,11 +8,11 @@ PREHOOK: type: DESCFUNCTION POSTHOOK: query: DESCRIBE FUNCTION EXTENDED < POSTHOOK: type: DESCFUNCTION a < b - Returns TRUE if a is less than b -PREHOOK: query: SELECT true Map Operator Tree: - src - TableScan - alias: src - Filter Operator - predicate: - expr: (key = 86) - type: boolean - Select Operator - expressions: - expr: ('_%_' like '%\_\%\_%') - type: boolean - expr: ('__' like '%\_\%\_%') - type: boolean - expr: ('%%_%_' like '%\_\%\_%') - type: boolean - expr: ('%_%_%' like '%\%\_\%') - type: boolean - expr: ('_%_' like '\%\_%') - type: boolean - expr: ('%__' like '__\%%') - type: boolean - expr: ('_%' like '\_\%\_\%%') - type: boolean - expr: ('_%' like '\_\%_%') - type: boolean - expr: ('%_' like '\%\_') - type: boolean - expr: ('ab' like '\%\_') - type: boolean - expr: ('ab' like '_a%') - type: boolean - expr: ('ab' like 'a') - type: boolean - expr: ('ab' like '') - type: boolean - expr: ('' like '') - type: boolean - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: -1 + Processor Tree: + TableScan + alias: src + Filter Operator + predicate: + expr: (key = 86) + type: boolean + Select Operator + expressions: + expr: ('_%_' like '%\_\%\_%') + type: boolean + expr: ('__' like '%\_\%\_%') + type: boolean + expr: ('%%_%_' like '%\_\%\_%') + type: boolean + expr: ('%_%_%' like '%\%\_\%') + type: boolean + expr: ('_%_' like '\%\_%') + type: boolean + expr: ('%__' like '__\%%') + type: boolean + expr: ('_%' like '\_\%\_\%%') + type: boolean + expr: ('_%' like '\_\%_%') + type: boolean + expr: ('%_' like '\%\_') + type: boolean + expr: ('ab' like '\%\_') + type: boolean + expr: ('ab' like '_a%') + type: boolean + expr: ('ab' like 'a') + type: boolean + expr: ('ab' like '') + type: boolean + expr: ('' like '') + type: boolean + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13 + ListSink PREHOOK: query: SELECT '_%_' LIKE '%\_\%\_%', '__' LIKE '%\_\%\_%', '%%_%_' LIKE '%\_\%\_%', '%_%_%' LIKE '%\%\_\%', @@ -105,7 +94,7 @@ PREHOOK: query: SELECT '1+2' LIKE '_+_', 
'112' LIKE '1+_', '|||' LIKE '|_|', '+++' LIKE '1+_' -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### @@ -114,7 +103,7 @@ POSTHOOK: query: SELECT '1+2' LIKE '_+_', '112' LIKE '1+_', '|||' LIKE '|_|', '+++' LIKE '1+_' -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_locate.q.out b/ql/src/test/results/clientpositive/udf_locate.q.out index b837c81..b646b21 100644 --- a/ql/src/test/results/clientpositive/udf_locate.q.out +++ b/ql/src/test/results/clientpositive/udf_locate.q.out @@ -29,7 +29,7 @@ SELECT locate('abc', 'abcd'), locate('abc', null), locate('abc', 'abcd', null), locate('abc', 'abcd', 'invalid number') -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY POSTHOOK: query: EXPLAIN SELECT locate('abc', 'abcd'), @@ -49,71 +49,60 @@ SELECT locate('abc', 'abcd'), locate('abc', null), locate('abc', 'abcd', null), locate('abc', 'abcd', 'invalid number') -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY ABSTRACT SYNTAX TREE: - (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION locate 'abc' 'abcd')) (TOK_SELEXPR (TOK_FUNCTION locate 'ccc' 'abcabc')) (TOK_SELEXPR (TOK_FUNCTION locate '23' 123)) (TOK_SELEXPR (TOK_FUNCTION locate 23 123)) (TOK_SELEXPR (TOK_FUNCTION locate 'abc' 'abcabc' 2)) (TOK_SELEXPR (TOK_FUNCTION locate 'abc' 'abcabc' '2')) (TOK_SELEXPR (TOK_FUNCTION locate 1 TRUE)) (TOK_SELEXPR (TOK_FUNCTION locate 1 FALSE)) (TOK_SELEXPR (TOK_FUNCTION locate (TOK_FUNCTION TOK_TINYINT '2') '12345')) (TOK_SELEXPR (TOK_FUNCTION locate '34' (TOK_FUNCTION TOK_SMALLINT '12345'))) (TOK_SELEXPR (TOK_FUNCTION locate '456' (TOK_FUNCTION TOK_BIGINT '123456789012'))) (TOK_SELEXPR (TOK_FUNCTION locate '.25' (TOK_FUNCTION TOK_FLOAT 1.25))) (TOK_SELEXPR (TOK_FUNCTION locate '.0' (TOK_FUNCTION TOK_DOUBLE 16.0))) (TOK_SELEXPR (TOK_FUNCTION locate TOK_NULL 'abc')) (TOK_SELEXPR (TOK_FUNCTION locate 'abc' TOK_NULL)) (TOK_SELEXPR (TOK_FUNCTION locate 'abc' 'abcd' TOK_NULL)) (TOK_SELEXPR (TOK_FUNCTION locate 'abc' 'abcd' 'invalid number'))) (TOK_LIMIT 1))) + (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src) (TOK_TABLESPLITSAMPLE TOK_ROWCOUNT 1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION locate 'abc' 'abcd')) (TOK_SELEXPR (TOK_FUNCTION locate 'ccc' 'abcabc')) (TOK_SELEXPR (TOK_FUNCTION locate '23' 123)) (TOK_SELEXPR (TOK_FUNCTION locate 23 123)) (TOK_SELEXPR (TOK_FUNCTION locate 'abc' 'abcabc' 2)) (TOK_SELEXPR (TOK_FUNCTION locate 'abc' 'abcabc' '2')) (TOK_SELEXPR (TOK_FUNCTION locate 1 TRUE)) (TOK_SELEXPR (TOK_FUNCTION locate 1 FALSE)) (TOK_SELEXPR (TOK_FUNCTION locate (TOK_FUNCTION TOK_TINYINT '2') '12345')) (TOK_SELEXPR (TOK_FUNCTION locate '34' (TOK_FUNCTION TOK_SMALLINT '12345'))) (TOK_SELEXPR (TOK_FUNCTION locate '456' (TOK_FUNCTION TOK_BIGINT '123456789012'))) (TOK_SELEXPR (TOK_FUNCTION locate '.25' (TOK_FUNCTION TOK_FLOAT 1.25))) (TOK_SELEXPR (TOK_FUNCTION locate '.0' (TOK_FUNCTION TOK_DOUBLE 16.0))) (TOK_SELEXPR (TOK_FUNCTION locate TOK_NULL 'abc')) (TOK_SELEXPR (TOK_FUNCTION locate 'abc' TOK_NULL)) (TOK_SELEXPR (TOK_FUNCTION locate 'abc' 'abcd' TOK_NULL)) (TOK_SELEXPR (TOK_FUNCTION locate 'abc' 'abcd' 'invalid number'))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE 
PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - src - TableScan - alias: src - Select Operator - expressions: - expr: locate('abc''abcd') - type: int - expr: locate('ccc''abcabc') - type: int - expr: locate('23'123) - type: int - expr: locate(23123) - type: int - expr: locate('abc''abcabc'2) - type: int - expr: locate('abc''abcabc''2') - type: int - expr: locate(1true) - type: int - expr: locate(1false) - type: int - expr: locate(UDFToByte('2')'12345') - type: int - expr: locate('34'UDFToShort('12345')) - type: int - expr: locate('456'UDFToLong('123456789012')) - type: int - expr: locate('.25'UDFToFloat(1.25)) - type: int - expr: locate('.0'16.0) - type: int - expr: locate(null'abc') - type: int - expr: locate('abc'null) - type: int - expr: locate('abc''abcd'null) - type: int - expr: locate('abc''abcd''invalid number') - type: int - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator - limit: 1 + limit: -1 + Processor Tree: + TableScan + alias: src + Row Limit Per Split: 1 + Select Operator + expressions: + expr: locate('abc''abcd') + type: int + expr: locate('ccc''abcabc') + type: int + expr: locate('23'123) + type: int + expr: locate(23123) + type: int + expr: locate('abc''abcabc'2) + type: int + expr: locate('abc''abcabc''2') + type: int + expr: locate(1true) + type: int + expr: locate(1false) + type: int + expr: locate(UDFToByte('2')'12345') + type: int + expr: locate('34'UDFToShort('12345')) + type: int + expr: locate('456'UDFToLong('123456789012')) + type: int + expr: locate('.25'UDFToFloat(1.25)) + type: int + expr: locate('.0'16.0) + type: int + expr: locate(null'abc') + type: int + expr: locate('abc'null) + type: int + expr: locate('abc''abcd'null) + type: int + expr: locate('abc''abcd''invalid number') + type: int + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16 + ListSink PREHOOK: query: SELECT locate('abc', 'abcd'), @@ -133,7 +122,7 @@ PREHOOK: query: SELECT locate('abc', 'abcd'), locate('abc', null), locate('abc', 'abcd', null), locate('abc', 'abcd', 'invalid number') -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### @@ -154,7 +143,7 @@ POSTHOOK: query: SELECT locate('abc', 'abcd'), locate('abc', null), locate('abc', 'abcd', null), locate('abc', 'abcd', 'invalid number') -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_lpad.q.out b/ql/src/test/results/clientpositive/udf_lpad.q.out index eb186ca..40eb975 100644 --- a/ql/src/test/results/clientpositive/udf_lpad.q.out +++ b/ql/src/test/results/clientpositive/udf_lpad.q.out @@ -17,56 +17,45 @@ PREHOOK: query: EXPLAIN SELECT lpad('hi', 1, '?'), lpad('hi', 5, '.'), lpad('hi', 6, '123') -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY POSTHOOK: query: EXPLAIN SELECT lpad('hi', 1, '?'), lpad('hi', 5, '.'), lpad('hi', 6, '123') -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: 
QUERY ABSTRACT SYNTAX TREE: - (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION lpad 'hi' 1 '?')) (TOK_SELEXPR (TOK_FUNCTION lpad 'hi' 5 '.')) (TOK_SELEXPR (TOK_FUNCTION lpad 'hi' 6 '123'))) (TOK_LIMIT 1))) + (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src) (TOK_TABLESPLITSAMPLE TOK_ROWCOUNT 1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION lpad 'hi' 1 '?')) (TOK_SELEXPR (TOK_FUNCTION lpad 'hi' 5 '.')) (TOK_SELEXPR (TOK_FUNCTION lpad 'hi' 6 '123'))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - src - TableScan - alias: src - Select Operator - expressions: - expr: lpad('hi', 1, '?') - type: string - expr: lpad('hi', 5, '.') - type: string - expr: lpad('hi', 6, '123') - type: string - outputColumnNames: _col0, _col1, _col2 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator - limit: 1 + limit: -1 + Processor Tree: + TableScan + alias: src + Row Limit Per Split: 1 + Select Operator + expressions: + expr: lpad('hi', 1, '?') + type: string + expr: lpad('hi', 5, '.') + type: string + expr: lpad('hi', 6, '123') + type: string + outputColumnNames: _col0, _col1, _col2 + ListSink PREHOOK: query: SELECT lpad('hi', 1, '?'), lpad('hi', 5, '.'), lpad('hi', 6, '123') -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### @@ -74,7 +63,7 @@ POSTHOOK: query: SELECT lpad('hi', 1, '?'), lpad('hi', 5, '.'), lpad('hi', 6, '123') -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_map.q.out b/ql/src/test/results/clientpositive/udf_map.q.out index 7f30c3e..8e78131 100644 --- a/ql/src/test/results/clientpositive/udf_map.q.out +++ b/ql/src/test/results/clientpositive/udf_map.q.out @@ -9,61 +9,50 @@ POSTHOOK: query: DESCRIBE FUNCTION EXTENDED map POSTHOOK: type: DESCFUNCTION map(key0, value0, key1, value1...) 
- Creates a map with the given key/value pairs PREHOOK: query: EXPLAIN SELECT map(), map(1, "a", 2, "b", 3, "c"), map(1, 2, "a", "b"), -map(1, "a", 2, "b", 3, "c")[2], map(1, 2, "a", "b")["a"], map(1, array("a"))[1][0] FROM src LIMIT 1 +map(1, "a", 2, "b", 3, "c")[2], map(1, 2, "a", "b")["a"], map(1, array("a"))[1][0] FROM src tablesample (1 rows) PREHOOK: type: QUERY POSTHOOK: query: EXPLAIN SELECT map(), map(1, "a", 2, "b", 3, "c"), map(1, 2, "a", "b"), -map(1, "a", 2, "b", 3, "c")[2], map(1, 2, "a", "b")["a"], map(1, array("a"))[1][0] FROM src LIMIT 1 +map(1, "a", 2, "b", 3, "c")[2], map(1, 2, "a", "b")["a"], map(1, array("a"))[1][0] FROM src tablesample (1 rows) POSTHOOK: type: QUERY ABSTRACT SYNTAX TREE: - (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION map)) (TOK_SELEXPR (TOK_FUNCTION map 1 "a" 2 "b" 3 "c")) (TOK_SELEXPR (TOK_FUNCTION map 1 2 "a" "b")) (TOK_SELEXPR ([ (TOK_FUNCTION map 1 "a" 2 "b" 3 "c") 2)) (TOK_SELEXPR ([ (TOK_FUNCTION map 1 2 "a" "b") "a")) (TOK_SELEXPR ([ ([ (TOK_FUNCTION map 1 (TOK_FUNCTION array "a")) 1) 0))) (TOK_LIMIT 1))) + (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src) (TOK_TABLESPLITSAMPLE TOK_ROWCOUNT 1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION map)) (TOK_SELEXPR (TOK_FUNCTION map 1 "a" 2 "b" 3 "c")) (TOK_SELEXPR (TOK_FUNCTION map 1 2 "a" "b")) (TOK_SELEXPR ([ (TOK_FUNCTION map 1 "a" 2 "b" 3 "c") 2)) (TOK_SELEXPR ([ (TOK_FUNCTION map 1 2 "a" "b") "a")) (TOK_SELEXPR ([ ([ (TOK_FUNCTION map 1 (TOK_FUNCTION array "a")) 1) 0))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - src - TableScan - alias: src - Select Operator - expressions: - expr: map() - type: map - expr: map(1:'a',2:'b',3:'c') - type: map - expr: map(1:2,'a':'b') - type: map - expr: map(1:'a',2:'b',3:'c')[2] - type: string - expr: map(1:2,'a':'b')['a'] - type: string - expr: map(1:array('a'))[1][0] - type: string - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator - limit: 1 + limit: -1 + Processor Tree: + TableScan + alias: src + Row Limit Per Split: 1 + Select Operator + expressions: + expr: map() + type: map + expr: map(1:'a',2:'b',3:'c') + type: map + expr: map(1:2,'a':'b') + type: map + expr: map(1:'a',2:'b',3:'c')[2] + type: string + expr: map(1:2,'a':'b')['a'] + type: string + expr: map(1:array('a'))[1][0] + type: string + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + ListSink PREHOOK: query: SELECT map(), map(1, "a", 2, "b", 3, "c"), map(1, 2, "a", "b"), -map(1, "a", 2, "b", 3, "c")[2], map(1, 2, "a", "b")["a"], map(1, array("a"))[1][0] FROM src LIMIT 1 +map(1, "a", 2, "b", 3, "c")[2], map(1, 2, "a", "b")["a"], map(1, array("a"))[1][0] FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### POSTHOOK: query: SELECT map(), map(1, "a", 2, "b", 3, "c"), map(1, 2, "a", "b"), -map(1, "a", 2, "b", 3, "c")[2], map(1, 2, "a", "b")["a"], map(1, array("a"))[1][0] FROM src LIMIT 1 +map(1, "a", 2, "b", 3, "c")[2], map(1, 2, "a", "b")["a"], map(1, array("a"))[1][0] FROM 
src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_map_keys.q.out b/ql/src/test/results/clientpositive/udf_map_keys.q.out index 5001686..4cb5a17 100644 --- a/ql/src/test/results/clientpositive/udf_map_keys.q.out +++ b/ql/src/test/results/clientpositive/udf_map_keys.q.out @@ -17,23 +17,23 @@ POSTHOOK: query: DESCRIBE FUNCTION EXTENDED map_keys POSTHOOK: type: DESCFUNCTION map_keys(map) - Returns an unordered array containing the keys of the input map. PREHOOK: query: -- Evaluate function against INT valued keys -SELECT map_keys(map(1, "a", 2, "b", 3, "c")) FROM src LIMIT 1 +SELECT map_keys(map(1, "a", 2, "b", 3, "c")) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### POSTHOOK: query: -- Evaluate function against INT valued keys -SELECT map_keys(map(1, "a", 2, "b", 3, "c")) FROM src LIMIT 1 +SELECT map_keys(map(1, "a", 2, "b", 3, "c")) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### [1,2,3] PREHOOK: query: -- Evaluate function against STRING valued keys -SELECT map_keys(map("a", 1, "b", 2, "c", 3)) FROM src LIMIT 1 +SELECT map_keys(map("a", 1, "b", 2, "c", 3)) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### POSTHOOK: query: -- Evaluate function against STRING valued keys -SELECT map_keys(map("a", 1, "b", 2, "c", 3)) FROM src LIMIT 1 +SELECT map_keys(map("a", 1, "b", 2, "c", 3)) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_map_values.q.out b/ql/src/test/results/clientpositive/udf_map_values.q.out index 8174112..1f78b0e 100644 --- a/ql/src/test/results/clientpositive/udf_map_values.q.out +++ b/ql/src/test/results/clientpositive/udf_map_values.q.out @@ -17,23 +17,23 @@ POSTHOOK: query: DESCRIBE FUNCTION EXTENDED map_values POSTHOOK: type: DESCFUNCTION map_values(map) - Returns an unordered array containing the values of the input map. 
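The same rewrite runs through all of these golden files: constant-only UDF probes that used to read FROM src LIMIT 1 now read FROM src tablesample (1 rows), the row bound moves into the TableScan (shown as Row Limit Per Split: 1) instead of a trailing Limit operator, and the old Map Reduce stage disappears in favor of a single Fetch Operator whose Processor Tree ends in a ListSink. A minimal HiveQL sketch of the before/after form, reusing the src table and the map_keys probe these tests already use (the fetch-conversion setting shown is an assumption taken from the query-file side of the patch, not part of these .q.out hunks):

-- before: the limit is applied after the scan and the query compiles to a Map Reduce stage
SELECT map_keys(map(1, "a", 2, "b", 3, "c")) FROM src LIMIT 1;

-- after: row-count sampling bounds the scan itself, and the plan becomes a single
-- Fetch Operator (TableScan with Row Limit Per Split: 1, ending in a ListSink)
set hive.fetch.task.conversion=more;   -- assumed companion setting, not shown in these hunks
SELECT map_keys(map(1, "a", 2, "b", 3, "c")) FROM src tablesample (1 rows);

The PREHOOK/POSTHOOK result lines are untouched by the diff, which is the point of the change: the sampled, fetch-converted plan returns the same single row without launching a MapReduce job.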
PREHOOK: query: -- Evaluate function against STRING valued values -SELECT map_values(map(1, "a", 2, "b", 3, "c")) FROM src LIMIT 1 +SELECT map_values(map(1, "a", 2, "b", 3, "c")) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### POSTHOOK: query: -- Evaluate function against STRING valued values -SELECT map_values(map(1, "a", 2, "b", 3, "c")) FROM src LIMIT 1 +SELECT map_values(map(1, "a", 2, "b", 3, "c")) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### ["a","b","c"] PREHOOK: query: -- Evaluate function against INT valued keys -SELECT map_values(map("a", 1, "b", 2, "c", 3)) FROM src LIMIT 1 +SELECT map_values(map("a", 1, "b", 2, "c", 3)) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### POSTHOOK: query: -- Evaluate function against INT valued keys -SELECT map_values(map("a", 1, "b", 2, "c", 3)) FROM src LIMIT 1 +SELECT map_values(map("a", 1, "b", 2, "c", 3)) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_named_struct.q.out b/ql/src/test/results/clientpositive/udf_named_struct.q.out index f8d32d1..18a8870 100644 --- a/ql/src/test/results/clientpositive/udf_named_struct.q.out +++ b/ql/src/test/results/clientpositive/udf_named_struct.q.out @@ -10,54 +10,43 @@ POSTHOOK: type: DESCFUNCTION named_struct(name1, val1, name2, val2, ...) - Creates a struct with the given field names and values PREHOOK: query: EXPLAIN SELECT named_struct("foo", 1, "bar", 2), - named_struct("foo", 1, "bar", 2).foo FROM src LIMIT 1 + named_struct("foo", 1, "bar", 2).foo FROM src tablesample (1 rows) PREHOOK: type: QUERY POSTHOOK: query: EXPLAIN SELECT named_struct("foo", 1, "bar", 2), - named_struct("foo", 1, "bar", 2).foo FROM src LIMIT 1 + named_struct("foo", 1, "bar", 2).foo FROM src tablesample (1 rows) POSTHOOK: type: QUERY ABSTRACT SYNTAX TREE: - (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION named_struct "foo" 1 "bar" 2)) (TOK_SELEXPR (. (TOK_FUNCTION named_struct "foo" 1 "bar" 2) foo))) (TOK_LIMIT 1))) + (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src) (TOK_TABLESPLITSAMPLE TOK_ROWCOUNT 1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION named_struct "foo" 1 "bar" 2)) (TOK_SELEXPR (. 
(TOK_FUNCTION named_struct "foo" 1 "bar" 2) foo))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - src - TableScan - alias: src - Select Operator - expressions: - expr: named_struct('foo',1,'bar',2) - type: struct - expr: named_struct('foo',1,'bar',2).foo - type: int - outputColumnNames: _col0, _col1 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator - limit: 1 + limit: -1 + Processor Tree: + TableScan + alias: src + Row Limit Per Split: 1 + Select Operator + expressions: + expr: named_struct('foo',1,'bar',2) + type: struct + expr: named_struct('foo',1,'bar',2).foo + type: int + outputColumnNames: _col0, _col1 + ListSink PREHOOK: query: SELECT named_struct("foo", 1, "bar", 2), - named_struct("foo", 1, "bar", 2).foo FROM src LIMIT 1 + named_struct("foo", 1, "bar", 2).foo FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### POSTHOOK: query: SELECT named_struct("foo", 1, "bar", 2), - named_struct("foo", 1, "bar", 2).foo FROM src LIMIT 1 + named_struct("foo", 1, "bar", 2).foo FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_negative.q.out b/ql/src/test/results/clientpositive/udf_negative.q.out index 7a29a9b..7863688 100644 --- a/ql/src/test/results/clientpositive/udf_negative.q.out +++ b/ql/src/test/results/clientpositive/udf_negative.q.out @@ -20,56 +20,56 @@ PREHOOK: type: DESCFUNCTION POSTHOOK: query: DESCRIBE FUNCTION EXTENDED - POSTHOOK: type: DESCFUNCTION a - b - Returns the difference a-b -PREHOOK: query: select - null from src limit 1 +PREHOOK: query: select - null from src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: select - null from src limit 1 +POSTHOOK: query: select - null from src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### NULL -PREHOOK: query: select - cast(null as int) from src limit 1 +PREHOOK: query: select - cast(null as int) from src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: select - cast(null as int) from src limit 1 +POSTHOOK: query: select - cast(null as int) from src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### NULL -PREHOOK: query: select - cast(null as smallint) from src limit 1 +PREHOOK: query: select - cast(null as smallint) from src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: select - cast(null as smallint) from src limit 1 +POSTHOOK: query: select - cast(null as smallint) from src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### NULL -PREHOOK: query: select - cast(null as bigint) from src limit 1 +PREHOOK: query: select - cast(null as bigint) from src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: select - cast(null as bigint) from src limit 1 
+POSTHOOK: query: select - cast(null as bigint) from src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### NULL -PREHOOK: query: select - cast(null as double) from src limit 1 +PREHOOK: query: select - cast(null as double) from src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: select - cast(null as double) from src limit 1 +POSTHOOK: query: select - cast(null as double) from src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### NULL -PREHOOK: query: select - cast(null as float) from src limit 1 +PREHOOK: query: select - cast(null as float) from src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: select - cast(null as float) from src limit 1 +POSTHOOK: query: select - cast(null as float) from src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_notequal.q.out b/ql/src/test/results/clientpositive/udf_notequal.q.out index b9182fb..cda48aa 100644 --- a/ql/src/test/results/clientpositive/udf_notequal.q.out +++ b/ql/src/test/results/clientpositive/udf_notequal.q.out @@ -34,38 +34,27 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value))) (TOK_WHERE (<> (TOK_TABLE_OR_COL key) '302')))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - src - TableScan - alias: src - Filter Operator - predicate: - expr: (key <> '302') - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: -1 + Processor Tree: + TableScan + alias: src + Filter Operator + predicate: + expr: (key <> '302') + type: boolean + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + ListSink PREHOOK: query: SELECT key, value @@ -593,38 +582,27 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value))) (TOK_WHERE (!= (TOK_TABLE_OR_COL key) '302')))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - src - TableScan - alias: src - Filter Operator - predicate: - expr: (key <> '302') - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: -1 
+ Processor Tree: + TableScan + alias: src + Filter Operator + predicate: + expr: (key <> '302') + type: boolean + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + ListSink PREHOOK: query: SELECT key, value diff --git a/ql/src/test/results/clientpositive/udf_notop.q.out b/ql/src/test/results/clientpositive/udf_notop.q.out index 3deff24..25483c7 100644 --- a/ql/src/test/results/clientpositive/udf_notop.q.out +++ b/ql/src/test/results/clientpositive/udf_notop.q.out @@ -7,7 +7,7 @@ PREHOOK: query: SELECT 1 NOT IN (1, 2, 3), "abc" NOT RLIKE "^bc", "abc" NOT REGEXP "^ab", "abc" NOT REGEXP "^bc", - 1 IN (1, 2) AND "abc" NOT LIKE "bc%" FROM src LIMIT 1 + 1 IN (1, 2) AND "abc" NOT LIKE "bc%" FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### @@ -20,7 +20,7 @@ POSTHOOK: query: SELECT 1 NOT IN (1, 2, 3), "abc" NOT RLIKE "^bc", "abc" NOT REGEXP "^ab", "abc" NOT REGEXP "^bc", - 1 IN (1, 2) AND "abc" NOT LIKE "bc%" FROM src LIMIT 1 + 1 IN (1, 2) AND "abc" NOT LIKE "bc%" FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_nvl.q.out b/ql/src/test/results/clientpositive/udf_nvl.q.out index 7a32377..4e5b220 100644 --- a/ql/src/test/results/clientpositive/udf_nvl.q.out +++ b/ql/src/test/results/clientpositive/udf_nvl.q.out @@ -14,57 +14,46 @@ Example: PREHOOK: query: EXPLAIN SELECT NVL( 1 , 2 ) AS COL1, NVL( NULL, 5 ) AS COL2 -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY POSTHOOK: query: EXPLAIN SELECT NVL( 1 , 2 ) AS COL1, NVL( NULL, 5 ) AS COL2 -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY ABSTRACT SYNTAX TREE: - (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION NVL 1 2) COL1) (TOK_SELEXPR (TOK_FUNCTION NVL TOK_NULL 5) COL2)) (TOK_LIMIT 1))) + (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src) (TOK_TABLESPLITSAMPLE TOK_ROWCOUNT 1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION NVL 1 2) COL1) (TOK_SELEXPR (TOK_FUNCTION NVL TOK_NULL 5) COL2)))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - src - TableScan - alias: src - Select Operator - expressions: - expr: if 1 is null returns2 - type: int - expr: if null is null returns5 - type: int - outputColumnNames: _col0, _col1 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator - limit: 1 + limit: -1 + Processor Tree: + TableScan + alias: src + Row Limit Per Split: 1 + Select Operator + expressions: + expr: if 1 is null returns2 + type: int + expr: if null is null returns5 + type: int + outputColumnNames: _col0, _col1 + ListSink PREHOOK: query: SELECT NVL( 1 , 2 ) AS COL1, NVL( NULL, 5 ) AS COL2 -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### POSTHOOK: query: SELECT NVL( 1 , 2 ) AS COL1, NVL( NULL, 5 ) AS COL2 -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: 
default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_pmod.q.out b/ql/src/test/results/clientpositive/udf_pmod.q.out index f28caa6..0c77243 100644 --- a/ql/src/test/results/clientpositive/udf_pmod.q.out +++ b/ql/src/test/results/clientpositive/udf_pmod.q.out @@ -9,88 +9,88 @@ POSTHOOK: query: DESCRIBE FUNCTION EXTENDED pmod POSTHOOK: type: DESCFUNCTION a pmod b - Compute the positive modulo PREHOOK: query: SELECT pmod(null, null) -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### POSTHOOK: query: SELECT pmod(null, null) -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### NULL PREHOOK: query: SELECT pmod(-100,9), pmod(-50,101), pmod(-1000,29) -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### POSTHOOK: query: SELECT pmod(-100,9), pmod(-50,101), pmod(-1000,29) -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### 8 51 15 PREHOOK: query: SELECT pmod(100,19), pmod(50,125), pmod(300,15) -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### POSTHOOK: query: SELECT pmod(100,19), pmod(50,125), pmod(300,15) -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### 5 50 0 -PREHOOK: query: SELECT pmod(CAST(-100 AS TINYINT),CAST(9 AS TINYINT)), pmod(CAST(-50 AS TINYINT),CAST(101 AS TINYINT)), pmod(CAST(-100 AS TINYINT),CAST(29 AS TINYINT)) FROM src LIMIT 1 +PREHOOK: query: SELECT pmod(CAST(-100 AS TINYINT),CAST(9 AS TINYINT)), pmod(CAST(-50 AS TINYINT),CAST(101 AS TINYINT)), pmod(CAST(-100 AS TINYINT),CAST(29 AS TINYINT)) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT pmod(CAST(-100 AS TINYINT),CAST(9 AS TINYINT)), pmod(CAST(-50 AS TINYINT),CAST(101 AS TINYINT)), pmod(CAST(-100 AS TINYINT),CAST(29 AS TINYINT)) FROM src LIMIT 1 +POSTHOOK: query: SELECT pmod(CAST(-100 AS TINYINT),CAST(9 AS TINYINT)), pmod(CAST(-50 AS TINYINT),CAST(101 AS TINYINT)), pmod(CAST(-100 AS TINYINT),CAST(29 AS TINYINT)) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### 8 51 16 -PREHOOK: query: SELECT pmod(CAST(-100 AS SMALLINT),CAST(9 AS SMALLINT)), pmod(CAST(-50 AS SMALLINT),CAST(101 AS SMALLINT)), pmod(CAST(-100 AS SMALLINT),CAST(29 AS SMALLINT)) FROM src LIMIT 1 +PREHOOK: query: SELECT pmod(CAST(-100 AS SMALLINT),CAST(9 AS SMALLINT)), pmod(CAST(-50 AS SMALLINT),CAST(101 AS SMALLINT)), pmod(CAST(-100 AS SMALLINT),CAST(29 AS SMALLINT)) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT pmod(CAST(-100 AS SMALLINT),CAST(9 AS SMALLINT)), pmod(CAST(-50 AS SMALLINT),CAST(101 AS SMALLINT)), pmod(CAST(-100 AS SMALLINT),CAST(29 AS SMALLINT)) FROM src LIMIT 1 +POSTHOOK: query: SELECT pmod(CAST(-100 AS SMALLINT),CAST(9 AS SMALLINT)), pmod(CAST(-50 AS SMALLINT),CAST(101 AS SMALLINT)), pmod(CAST(-100 AS SMALLINT),CAST(29 AS SMALLINT)) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### 8 51 16 -PREHOOK: 
query: SELECT pmod(CAST(-100 AS BIGINT),CAST(9 AS BIGINT)), pmod(CAST(-50 AS BIGINT),CAST(101 AS BIGINT)), pmod(CAST(-100 AS BIGINT),CAST(29 AS BIGINT)) FROM src LIMIT 1 +PREHOOK: query: SELECT pmod(CAST(-100 AS BIGINT),CAST(9 AS BIGINT)), pmod(CAST(-50 AS BIGINT),CAST(101 AS BIGINT)), pmod(CAST(-100 AS BIGINT),CAST(29 AS BIGINT)) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT pmod(CAST(-100 AS BIGINT),CAST(9 AS BIGINT)), pmod(CAST(-50 AS BIGINT),CAST(101 AS BIGINT)), pmod(CAST(-100 AS BIGINT),CAST(29 AS BIGINT)) FROM src LIMIT 1 +POSTHOOK: query: SELECT pmod(CAST(-100 AS BIGINT),CAST(9 AS BIGINT)), pmod(CAST(-50 AS BIGINT),CAST(101 AS BIGINT)), pmod(CAST(-100 AS BIGINT),CAST(29 AS BIGINT)) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### 8 51 16 -PREHOOK: query: SELECT pmod(CAST(-100.91 AS FLOAT),CAST(9.8 AS FLOAT)), pmod(CAST(-50.1 AS FLOAT),CAST(101.8 AS FLOAT)), pmod(CAST(-100.91 AS FLOAT),CAST(29.75 AS FLOAT)) FROM src LIMIT 1 +PREHOOK: query: SELECT pmod(CAST(-100.91 AS FLOAT),CAST(9.8 AS FLOAT)), pmod(CAST(-50.1 AS FLOAT),CAST(101.8 AS FLOAT)), pmod(CAST(-100.91 AS FLOAT),CAST(29.75 AS FLOAT)) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT pmod(CAST(-100.91 AS FLOAT),CAST(9.8 AS FLOAT)), pmod(CAST(-50.1 AS FLOAT),CAST(101.8 AS FLOAT)), pmod(CAST(-100.91 AS FLOAT),CAST(29.75 AS FLOAT)) FROM src LIMIT 1 +POSTHOOK: query: SELECT pmod(CAST(-100.91 AS FLOAT),CAST(9.8 AS FLOAT)), pmod(CAST(-50.1 AS FLOAT),CAST(101.8 AS FLOAT)), pmod(CAST(-100.91 AS FLOAT),CAST(29.75 AS FLOAT)) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### 6.8899984 51.700005 18.089996 -PREHOOK: query: SELECT pmod(CAST(-100.91 AS DOUBLE),CAST(9.8 AS DOUBLE)), pmod(CAST(-50.1 AS DOUBLE),CAST(101.8 AS DOUBLE)), pmod(CAST(-100.91 AS DOUBLE),CAST(29.75 AS DOUBLE)) FROM src LIMIT 1 +PREHOOK: query: SELECT pmod(CAST(-100.91 AS DOUBLE),CAST(9.8 AS DOUBLE)), pmod(CAST(-50.1 AS DOUBLE),CAST(101.8 AS DOUBLE)), pmod(CAST(-100.91 AS DOUBLE),CAST(29.75 AS DOUBLE)) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT pmod(CAST(-100.91 AS DOUBLE),CAST(9.8 AS DOUBLE)), pmod(CAST(-50.1 AS DOUBLE),CAST(101.8 AS DOUBLE)), pmod(CAST(-100.91 AS DOUBLE),CAST(29.75 AS DOUBLE)) FROM src LIMIT 1 +POSTHOOK: query: SELECT pmod(CAST(-100.91 AS DOUBLE),CAST(9.8 AS DOUBLE)), pmod(CAST(-50.1 AS DOUBLE),CAST(101.8 AS DOUBLE)), pmod(CAST(-100.91 AS DOUBLE),CAST(29.75 AS DOUBLE)) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### 6.890000000000011 51.699999999999996 18.090000000000003 -PREHOOK: query: SELECT pmod(CAST(-100.91 AS DECIMAL(5,2)),CAST(9.8 AS DECIMAL(2,1))), pmod(CAST(-50.1 AS DECIMAL(3,1)),CAST(101.8 AS DECIMAL(4,1))), pmod(CAST(-100.91 AS DECIMAL(5,2)),CAST(29.75 AS DECIMAL(4,2))) FROM src LIMIT 1 +PREHOOK: query: SELECT pmod(CAST(-100.91 AS DECIMAL(5,2)),CAST(9.8 AS DECIMAL(2,1))), pmod(CAST(-50.1 AS DECIMAL(3,1)),CAST(101.8 AS DECIMAL(4,1))), pmod(CAST(-100.91 AS DECIMAL(5,2)),CAST(29.75 AS DECIMAL(4,2))) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT pmod(CAST(-100.91 AS 
DECIMAL(5,2)),CAST(9.8 AS DECIMAL(2,1))), pmod(CAST(-50.1 AS DECIMAL(3,1)),CAST(101.8 AS DECIMAL(4,1))), pmod(CAST(-100.91 AS DECIMAL(5,2)),CAST(29.75 AS DECIMAL(4,2))) FROM src LIMIT 1 +POSTHOOK: query: SELECT pmod(CAST(-100.91 AS DECIMAL(5,2)),CAST(9.8 AS DECIMAL(2,1))), pmod(CAST(-50.1 AS DECIMAL(3,1)),CAST(101.8 AS DECIMAL(4,1))), pmod(CAST(-100.91 AS DECIMAL(5,2)),CAST(29.75 AS DECIMAL(4,2))) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_printf.q.out b/ql/src/test/results/clientpositive/udf_printf.q.out index 9cc2158..5317789 100644 --- a/ql/src/test/results/clientpositive/udf_printf.q.out +++ b/ql/src/test/results/clientpositive/udf_printf.q.out @@ -20,75 +20,65 @@ Example: > SELECT printf("Hello World %d %s", 100, "days")FROM src LIMIT 1; "Hello World 100 days" PREHOOK: query: EXPLAIN -SELECT printf("Hello World %d %s", 100, "days") FROM src LIMIT 1 +SELECT printf("Hello World %d %s", 100, "days") FROM src tablesample (1 rows) PREHOOK: type: QUERY POSTHOOK: query: EXPLAIN -SELECT printf("Hello World %d %s", 100, "days") FROM src LIMIT 1 +SELECT printf("Hello World %d %s", 100, "days") FROM src tablesample (1 rows) POSTHOOK: type: QUERY ABSTRACT SYNTAX TREE: - (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION printf "Hello World %d %s" 100 "days"))) (TOK_LIMIT 1))) + (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src) (TOK_TABLESPLITSAMPLE TOK_ROWCOUNT 1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION printf "Hello World %d %s" 100 "days"))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - src - TableScan - alias: src - Select Operator - expressions: - expr: printf('Hello World %d %s', 100, 'days') - type: string - outputColumnNames: _col0 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator - limit: 1 + limit: -1 + Processor Tree: + TableScan + alias: src + Row Limit Per Split: 1 + Select Operator + expressions: + expr: printf('Hello World %d %s', 100, 'days') + type: string + outputColumnNames: _col0 + ListSink PREHOOK: query: -- Test Primitive Types -SELECT printf("Hello World %d %s", 100, "days") FROM src LIMIT 1 +SELECT printf("Hello World %d %s", 100, "days") FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### POSTHOOK: query: -- Test Primitive Types -SELECT printf("Hello World %d %s", 100, "days") FROM src LIMIT 1 +SELECT printf("Hello World %d %s", 100, "days") FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### Hello World 100 days -PREHOOK: query: SELECT printf("All Type Test: %b, %c, %d, %e, %+10.4f, %g, %h, %s, %a", false, 65, 15000, 12.3400, 27183.240051, 2300.41, 50, "corret", 256.125) FROM src LIMIT 1 +PREHOOK: query: SELECT printf("All Type Test: %b, %c, %d, %e, %+10.4f, %g, %h, %s, %a", false, 65, 15000, 12.3400, 27183.240051, 2300.41, 50, "corret", 256.125) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### 
A masked pattern was here #### -POSTHOOK: query: SELECT printf("All Type Test: %b, %c, %d, %e, %+10.4f, %g, %h, %s, %a", false, 65, 15000, 12.3400, 27183.240051, 2300.41, 50, "corret", 256.125) FROM src LIMIT 1 +POSTHOOK: query: SELECT printf("All Type Test: %b, %c, %d, %e, %+10.4f, %g, %h, %s, %a", false, 65, 15000, 12.3400, 27183.240051, 2300.41, 50, "corret", 256.125) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### All Type Test: false, A, 15000, 1.234000e+01, +27183.2401, 2300.41, 32, corret, 0x1.002p8 PREHOOK: query: -- Test NULL Values -SELECT printf("Color %s, String Null: %s, number1 %d, number2 %05d, Integer Null: %d, hex %#x, float %5.2f Double Null: %f\n", "red", NULL, 123456, 89, NULL, 255, 3.14159, NULL) FROM src LIMIT 1 +SELECT printf("Color %s, String Null: %s, number1 %d, number2 %05d, Integer Null: %d, hex %#x, float %5.2f Double Null: %f\n", "red", NULL, 123456, 89, NULL, 255, 3.14159, NULL) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### POSTHOOK: query: -- Test NULL Values -SELECT printf("Color %s, String Null: %s, number1 %d, number2 %05d, Integer Null: %d, hex %#x, float %5.2f Double Null: %f\n", "red", NULL, 123456, 89, NULL, 255, 3.14159, NULL) FROM src LIMIT 1 +SELECT printf("Color %s, String Null: %s, number1 %d, number2 %05d, Integer Null: %d, hex %#x, float %5.2f Double Null: %f\n", "red", NULL, 123456, 89, NULL, 255, 3.14159, NULL) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### Color red, String Null: null, number1 123456, number2 00089, Integer Null: null, hex 0xff, float 3.14 Double Null: null + PREHOOK: query: -- Test Timestamp create table timestamp_udf (t timestamp) PREHOOK: type: CREATETABLE @@ -96,15 +86,15 @@ POSTHOOK: query: -- Test Timestamp create table timestamp_udf (t timestamp) POSTHOOK: type: CREATETABLE POSTHOOK: Output: default@timestamp_udf -PREHOOK: query: from src +PREHOOK: query: from (select * from src tablesample (1 rows)) s insert overwrite table timestamp_udf - select '2011-05-06 07:08:09.1234567' limit 1 + select '2011-05-06 07:08:09.1234567' PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@timestamp_udf -POSTHOOK: query: from src +POSTHOOK: query: from (select * from src tablesample (1 rows)) s insert overwrite table timestamp_udf - select '2011-05-06 07:08:09.1234567' limit 1 + select '2011-05-06 07:08:09.1234567' POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@timestamp_udf diff --git a/ql/src/test/results/clientpositive/udf_radians.q.out b/ql/src/test/results/clientpositive/udf_radians.q.out index a838926..c5731a5 100644 --- a/ql/src/test/results/clientpositive/udf_radians.q.out +++ b/ql/src/test/results/clientpositive/udf_radians.q.out @@ -1,56 +1,45 @@ -PREHOOK: query: explain -select radians(57.2958) FROM src LIMIT 1 +PREHOOK: query: explain +select radians(57.2958) FROM src tablesample (1 rows) PREHOOK: type: QUERY -POSTHOOK: query: explain -select radians(57.2958) FROM src LIMIT 1 +POSTHOOK: query: explain +select radians(57.2958) FROM src tablesample (1 rows) POSTHOOK: type: QUERY ABSTRACT SYNTAX TREE: - (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION radians 57.2958))) (TOK_LIMIT 1))) + (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src) (TOK_TABLESPLITSAMPLE TOK_ROWCOUNT 
1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION radians 57.2958))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - src - TableScan - alias: src - Select Operator - expressions: - expr: radians(57.2958) - type: double - outputColumnNames: _col0 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator - limit: 1 + limit: -1 + Processor Tree: + TableScan + alias: src + Row Limit Per Split: 1 + Select Operator + expressions: + expr: radians(57.2958) + type: double + outputColumnNames: _col0 + ListSink -PREHOOK: query: select radians(57.2958) FROM src LIMIT 1 +PREHOOK: query: select radians(57.2958) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: select radians(57.2958) FROM src LIMIT 1 +POSTHOOK: query: select radians(57.2958) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### 1.000000357564167 -PREHOOK: query: select radians(143.2394) FROM src LIMIT 1 +PREHOOK: query: select radians(143.2394) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: select radians(143.2394) FROM src LIMIT 1 +POSTHOOK: query: select radians(143.2394) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### @@ -70,58 +59,47 @@ Example: 1.5707963267949mo PREHOOK: query: explain -select radians(57.2958) FROM src LIMIT 1 +select radians(57.2958) FROM src tablesample (1 rows) PREHOOK: type: QUERY POSTHOOK: query: explain -select radians(57.2958) FROM src LIMIT 1 +select radians(57.2958) FROM src tablesample (1 rows) POSTHOOK: type: QUERY ABSTRACT SYNTAX TREE: - (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION radians 57.2958))) (TOK_LIMIT 1))) + (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src) (TOK_TABLESPLITSAMPLE TOK_ROWCOUNT 1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION radians 57.2958))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - src - TableScan - alias: src - Select Operator - expressions: - expr: radians(57.2958) - type: double - outputColumnNames: _col0 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator - limit: 1 + limit: -1 + Processor Tree: + TableScan + alias: src + Row Limit Per Split: 1 + Select Operator + expressions: + expr: radians(57.2958) + type: double + outputColumnNames: _col0 + ListSink -PREHOOK: query: select radians(57.2958) FROM src LIMIT 1 +PREHOOK: query: select radians(57.2958) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: select 
radians(57.2958) FROM src LIMIT 1 +POSTHOOK: query: select radians(57.2958) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### 1.000000357564167 -PREHOOK: query: select radians(143.2394) FROM src LIMIT 1 +PREHOOK: query: select radians(143.2394) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: select radians(143.2394) FROM src LIMIT 1 +POSTHOOK: query: select radians(143.2394) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_reflect.q.out b/ql/src/test/results/clientpositive/udf_reflect.q.out index 52fa427..18b96f3 100644 --- a/ql/src/test/results/clientpositive/udf_reflect.q.out +++ b/ql/src/test/results/clientpositive/udf_reflect.q.out @@ -20,7 +20,7 @@ SELECT reflect("java.lang.String", "valueOf", 1), reflect("java.lang.Math", "exp", 1.0), reflect("java.lang.Math", "floor", 1.9), reflect("java.lang.Integer", "valueOf", key, 16) -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY POSTHOOK: query: EXPLAIN EXTENDED SELECT reflect("java.lang.String", "valueOf", 1), @@ -31,114 +31,43 @@ SELECT reflect("java.lang.String", "valueOf", 1), reflect("java.lang.Math", "exp", 1.0), reflect("java.lang.Math", "floor", 1.9), reflect("java.lang.Integer", "valueOf", key, 16) -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY ABSTRACT SYNTAX TREE: - (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION reflect "java.lang.String" "valueOf" 1)) (TOK_SELEXPR (TOK_FUNCTION reflect "java.lang.String" "isEmpty")) (TOK_SELEXPR (TOK_FUNCTION reflect "java.lang.Math" "max" 2 3)) (TOK_SELEXPR (TOK_FUNCTION reflect "java.lang.Math" "min" 2 3)) (TOK_SELEXPR (TOK_FUNCTION reflect "java.lang.Math" "round" 2.5)) (TOK_SELEXPR (TOK_FUNCTION reflect "java.lang.Math" "exp" 1.0)) (TOK_SELEXPR (TOK_FUNCTION reflect "java.lang.Math" "floor" 1.9)) (TOK_SELEXPR (TOK_FUNCTION reflect "java.lang.Integer" "valueOf" (TOK_TABLE_OR_COL key) 16))) (TOK_LIMIT 1))) + (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src) (TOK_TABLESPLITSAMPLE TOK_ROWCOUNT 1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION reflect "java.lang.String" "valueOf" 1)) (TOK_SELEXPR (TOK_FUNCTION reflect "java.lang.String" "isEmpty")) (TOK_SELEXPR (TOK_FUNCTION reflect "java.lang.Math" "max" 2 3)) (TOK_SELEXPR (TOK_FUNCTION reflect "java.lang.Math" "min" 2 3)) (TOK_SELEXPR (TOK_FUNCTION reflect "java.lang.Math" "round" 2.5)) (TOK_SELEXPR (TOK_FUNCTION reflect "java.lang.Math" "exp" 1.0)) (TOK_SELEXPR (TOK_FUNCTION reflect "java.lang.Math" "floor" 1.9)) (TOK_SELEXPR (TOK_FUNCTION reflect "java.lang.Integer" "valueOf" (TOK_TABLE_OR_COL key) 16))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - src - TableScan - alias: src - GatherStats: false - Select Operator - expressions: - expr: reflect('java.lang.String','valueOf',1) - type: string - expr: reflect('java.lang.String','isEmpty') - type: string - expr: reflect('java.lang.Math','max',2,3) - type: string - expr: reflect('java.lang.Math','min',2,3) - type: string - expr: reflect('java.lang.Math','round',2.5) - type: string - expr: reflect('java.lang.Math','exp',1.0) - type: string - 
expr: reflect('java.lang.Math','floor',1.9) - type: string - expr: reflect('java.lang.Integer','valueOf',key,16) - type: string - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - columns _col0,_col1,_col2,_col3,_col4,_col5,_col6,_col7 - columns.types string:string:string:string:string:string:string:string - escape.delim \ - hive.serialization.extend.nesting.levels true - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - TotalFiles: 1 - GatherStats: false - MultiFileSpray: false - Path -> Alias: -#### A masked pattern was here #### - Path -> Partition: -#### A masked pattern was here #### - Partition - base file name: src - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value - columns.types string:string -#### A masked pattern was here #### - name default.src - numFiles 1 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct src { string key, string value} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value - columns.types string:string -#### A masked pattern was here #### - name default.src - numFiles 1 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct src { string key, string value} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.src - name: default.src - Truncated Path -> Alias: - /src [src] - Stage: Stage-0 Fetch Operator - limit: 1 + limit: -1 + Processor Tree: + TableScan + alias: src + Row Limit Per Split: 1 + GatherStats: false + Select Operator + expressions: + expr: reflect('java.lang.String','valueOf',1) + type: string + expr: reflect('java.lang.String','isEmpty') + type: string + expr: reflect('java.lang.Math','max',2,3) + type: string + expr: reflect('java.lang.Math','min',2,3) + type: string + expr: reflect('java.lang.Math','round',2.5) + type: string + expr: reflect('java.lang.Math','exp',1.0) + type: string + expr: reflect('java.lang.Math','floor',1.9) + type: string + expr: reflect('java.lang.Integer','valueOf',key,16) + type: string + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7 + ListSink PREHOOK: query: SELECT reflect("java.lang.String", "valueOf", 1), @@ -149,7 +78,7 @@ PREHOOK: query: SELECT reflect("java.lang.String", "valueOf", 1), reflect("java.lang.Math", "exp", 1.0), reflect("java.lang.Math", "floor", 1.9), reflect("java.lang.Integer", "valueOf", key, 16) -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### @@ -161,7 +90,7 
@@ POSTHOOK: query: SELECT reflect("java.lang.String", "valueOf", 1), reflect("java.lang.Math", "exp", 1.0), reflect("java.lang.Math", "floor", 1.9), reflect("java.lang.Integer", "valueOf", key, 16) -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_regexp.q.out b/ql/src/test/results/clientpositive/udf_regexp.q.out index 30ce12a..e27f46a 100644 --- a/ql/src/test/results/clientpositive/udf_regexp.q.out +++ b/ql/src/test/results/clientpositive/udf_regexp.q.out @@ -14,13 +14,13 @@ Example: true PREHOOK: query: SELECT 'fofo' REGEXP '^fo', 'fo\no' REGEXP '^fo\no$', 'Bn' REGEXP '^Ba*n', 'afofo' REGEXP 'fo', 'afofo' REGEXP '^fo', 'Baan' REGEXP '^Ba?n', 'axe' REGEXP 'pi|apa', 'pip' REGEXP '^(pi)*$' -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### POSTHOOK: query: SELECT 'fofo' REGEXP '^fo', 'fo\no' REGEXP '^fo\no$', 'Bn' REGEXP '^Ba*n', 'afofo' REGEXP 'fo', 'afofo' REGEXP '^fo', 'Baan' REGEXP '^Ba?n', 'axe' REGEXP 'pi|apa', 'pip' REGEXP '^(pi)*$' -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_repeat.q.out b/ql/src/test/results/clientpositive/udf_repeat.q.out index 270c03f..1e23da9 100644 --- a/ql/src/test/results/clientpositive/udf_repeat.q.out +++ b/ql/src/test/results/clientpositive/udf_repeat.q.out @@ -16,52 +16,41 @@ PREHOOK: query: EXPLAIN SELECT repeat("", 4), repeat("asd", 0), repeat("asdf", -1) -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY POSTHOOK: query: EXPLAIN SELECT repeat("Facebook", 3), repeat("", 4), repeat("asd", 0), repeat("asdf", -1) -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY ABSTRACT SYNTAX TREE: - (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION repeat "Facebook" 3)) (TOK_SELEXPR (TOK_FUNCTION repeat "" 4)) (TOK_SELEXPR (TOK_FUNCTION repeat "asd" 0)) (TOK_SELEXPR (TOK_FUNCTION repeat "asdf" (- 1)))) (TOK_LIMIT 1))) + (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src) (TOK_TABLESPLITSAMPLE TOK_ROWCOUNT 1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION repeat "Facebook" 3)) (TOK_SELEXPR (TOK_FUNCTION repeat "" 4)) (TOK_SELEXPR (TOK_FUNCTION repeat "asd" 0)) (TOK_SELEXPR (TOK_FUNCTION repeat "asdf" (- 1)))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - src - TableScan - alias: src - Select Operator - expressions: - expr: repeat('Facebook', 3) - type: string - expr: repeat('', 4) - type: string - expr: repeat('asd', 0) - type: string - expr: repeat('asdf', (- 1)) - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator - limit: 1 + limit: -1 + Processor Tree: + TableScan + alias: src + Row Limit Per Split: 1 + Select Operator + expressions: + expr: repeat('Facebook', 3) + type: string + expr: repeat('', 4) + type: string + expr: 
repeat('asd', 0) + type: string + expr: repeat('asdf', (- 1)) + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + ListSink PREHOOK: query: SELECT @@ -69,7 +58,7 @@ PREHOOK: query: SELECT repeat("", 4), repeat("asd", 0), repeat("asdf", -1) -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### @@ -78,7 +67,7 @@ POSTHOOK: query: SELECT repeat("", 4), repeat("asd", 0), repeat("asdf", -1) -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_round.q.out b/ql/src/test/results/clientpositive/udf_round.q.out index 9ad1d91..1b10a4b 100644 --- a/ql/src/test/results/clientpositive/udf_round.q.out +++ b/ql/src/test/results/clientpositive/udf_round.q.out @@ -13,13 +13,13 @@ Example: 12.3' PREHOOK: query: SELECT round(null), round(null, 0), round(125, null), round(1.0/0.0, 0), round(power(-1.0,0.5), 0) -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### POSTHOOK: query: SELECT round(null), round(null, 0), round(125, null), round(1.0/0.0, 0), round(power(-1.0,0.5), 0) -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### @@ -28,7 +28,7 @@ PREHOOK: query: SELECT round(55555), round(55555, 0), round(55555, 1), round(55555, 2), round(55555, 3), round(55555, -1), round(55555, -2), round(55555, -3), round(55555, -4), round(55555, -5), round(55555, -6), round(55555, -7), round(55555, -8) -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### @@ -36,7 +36,7 @@ POSTHOOK: query: SELECT round(55555), round(55555, 0), round(55555, 1), round(55555, 2), round(55555, 3), round(55555, -1), round(55555, -2), round(55555, -3), round(55555, -4), round(55555, -5), round(55555, -6), round(55555, -7), round(55555, -8) -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### @@ -48,7 +48,7 @@ PREHOOK: query: SELECT round(-125.315), round(-125.315, 0), round(-125.315, 1), round(-125.315, 2), round(-125.315, 3), round(-125.315, 4), round(-125.315, -1), round(-125.315, -2), round(-125.315, -3), round(-125.315, -4) -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### @@ -59,7 +59,7 @@ POSTHOOK: query: SELECT round(-125.315), round(-125.315, 0), round(-125.315, 1), round(-125.315, 2), round(-125.315, 3), round(-125.315, 4), round(-125.315, -1), round(-125.315, -2), round(-125.315, -3), round(-125.315, -4) -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### @@ -82,7 +82,7 @@ PREHOOK: query: SELECT round(3.141592653589793, 12), round(3.141592653589793, 13), round(3.141592653589793, 13), round(3.141592653589793, 14), round(3.141592653589793, 15), round(3.141592653589793, 16) -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### @@ -104,18 +104,18 @@ POSTHOOK: query: SELECT round(3.141592653589793, 12), round(3.141592653589793, 13), round(3.141592653589793, 13), round(3.141592653589793, 14), round(3.141592653589793, 15), round(3.141592653589793, 16) -FROM 
src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 3.0 3.1 3.14 3.142 3.1416 3.14159 3.141593 3.1415927 3.14159265 3.141592654 3.1415926536 3.14159265359 3.14159265359 3.1415926535898 3.1415926535898 3.14159265358979 3.141592653589793 3.141592653589793 PREHOOK: query: SELECT round(1809242.3151111344, 9), round(-1809242.3151111344, 9) -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### POSTHOOK: query: SELECT round(1809242.3151111344, 9), round(-1809242.3151111344, 9) -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_round_2.q.out b/ql/src/test/results/clientpositive/udf_round_2.q.out index 5dbbd2b..457d074 100644 --- a/ql/src/test/results/clientpositive/udf_round_2.q.out +++ b/ql/src/test/results/clientpositive/udf_round_2.q.out @@ -1,17 +1,17 @@ -PREHOOK: query: -- test for NaN (not-a-number) +PREHOOK: query: -- test for NaN (not-a-number) create table tstTbl1(n double) PREHOOK: type: CREATETABLE -POSTHOOK: query: -- test for NaN (not-a-number) +POSTHOOK: query: -- test for NaN (not-a-number) create table tstTbl1(n double) POSTHOOK: type: CREATETABLE POSTHOOK: Output: default@tstTbl1 PREHOOK: query: insert overwrite table tstTbl1 -select 'NaN' from src limit 1 +select 'NaN' from src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@tsttbl1 POSTHOOK: query: insert overwrite table tstTbl1 -select 'NaN' from src limit 1 +select 'NaN' from src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@tsttbl1 @@ -47,12 +47,12 @@ POSTHOOK: Input: default@tsttbl1 POSTHOOK: Lineage: tsttbl1.n EXPRESSION [] NaN PREHOOK: query: -- test for Infinity -select round(1/0), round(1/0, 2), round(1.0/0.0), round(1.0/0.0, 2) from src limit 1 +select round(1/0), round(1/0, 2), round(1.0/0.0), round(1.0/0.0, 2) from src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### POSTHOOK: query: -- test for Infinity -select round(1/0), round(1/0, 2), round(1.0/0.0), round(1.0/0.0, 2) from src limit 1 +select round(1/0), round(1/0, 2), round(1.0/0.0), round(1.0/0.0, 2) from src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_round_3.q.out b/ql/src/test/results/clientpositive/udf_round_3.q.out index 0b00d6a..7bc7ca5 100644 --- a/ql/src/test/results/clientpositive/udf_round_3.q.out +++ b/ql/src/test/results/clientpositive/udf_round_3.q.out @@ -1,54 +1,54 @@ PREHOOK: query: -- test for TINYINT -select round(-128), round(127), round(0) from src limit 1 +select round(-128), round(127), round(0) from src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### POSTHOOK: query: -- test for TINYINT -select round(-128), round(127), round(0) from src limit 1 +select round(-128), round(127), round(0) from src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### -128 127 0 PREHOOK: query: -- test for SMALLINT -select round(-32768), round(32767), round(-129), round(128) from src limit 1 +select round(-32768), round(32767), 
round(-129), round(128) from src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### POSTHOOK: query: -- test for SMALLINT -select round(-32768), round(32767), round(-129), round(128) from src limit 1 +select round(-32768), round(32767), round(-129), round(128) from src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### -32768 32767 -129 128 PREHOOK: query: -- test for INT -select round(cast(negative(pow(2, 31)) as INT)), round(cast((pow(2, 31) - 1) as INT)), round(-32769), round(32768) from src limit 1 +select round(cast(negative(pow(2, 31)) as INT)), round(cast((pow(2, 31) - 1) as INT)), round(-32769), round(32768) from src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### POSTHOOK: query: -- test for INT -select round(cast(negative(pow(2, 31)) as INT)), round(cast((pow(2, 31) - 1) as INT)), round(-32769), round(32768) from src limit 1 +select round(cast(negative(pow(2, 31)) as INT)), round(cast((pow(2, 31) - 1) as INT)), round(-32769), round(32768) from src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### -2147483648 2147483647 -32769 32768 PREHOOK: query: -- test for BIGINT -select round(cast(negative(pow(2, 63)) as BIGINT)), round(cast((pow(2, 63) - 1) as BIGINT)), round(cast(negative(pow(2, 31) + 1) as BIGINT)), round(cast(pow(2, 31) as BIGINT)) from src limit 1 +select round(cast(negative(pow(2, 63)) as BIGINT)), round(cast((pow(2, 63) - 1) as BIGINT)), round(cast(negative(pow(2, 31) + 1) as BIGINT)), round(cast(pow(2, 31) as BIGINT)) from src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### POSTHOOK: query: -- test for BIGINT -select round(cast(negative(pow(2, 63)) as BIGINT)), round(cast((pow(2, 63) - 1) as BIGINT)), round(cast(negative(pow(2, 31) + 1) as BIGINT)), round(cast(pow(2, 31) as BIGINT)) from src limit 1 +select round(cast(negative(pow(2, 63)) as BIGINT)), round(cast((pow(2, 63) - 1) as BIGINT)), round(cast(negative(pow(2, 31) + 1) as BIGINT)), round(cast(pow(2, 31) as BIGINT)) from src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### -9223372036854775808 9223372036854775807 -2147483649 2147483648 PREHOOK: query: -- test for DOUBLE -select round(126.1), round(126.7), round(32766.1), round(32766.7) from src limit 1 +select round(126.1), round(126.7), round(32766.1), round(32766.7) from src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### POSTHOOK: query: -- test for DOUBLE -select round(126.1), round(126.7), round(32766.1), round(32766.7) from src limit 1 +select round(126.1), round(126.7), round(32766.1), round(32766.7) from src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_rpad.q.out b/ql/src/test/results/clientpositive/udf_rpad.q.out index fc82de2..2119a1d 100644 --- a/ql/src/test/results/clientpositive/udf_rpad.q.out +++ b/ql/src/test/results/clientpositive/udf_rpad.q.out @@ -17,56 +17,45 @@ PREHOOK: query: EXPLAIN SELECT rpad('hi', 1, '?'), rpad('hi', 5, '.'), rpad('hi', 6, '123') -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY POSTHOOK: query: EXPLAIN SELECT rpad('hi', 1, '?'), rpad('hi', 5, '.'), rpad('hi', 6, '123') -FROM src LIMIT 1 
+FROM src tablesample (1 rows) POSTHOOK: type: QUERY ABSTRACT SYNTAX TREE: - (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION rpad 'hi' 1 '?')) (TOK_SELEXPR (TOK_FUNCTION rpad 'hi' 5 '.')) (TOK_SELEXPR (TOK_FUNCTION rpad 'hi' 6 '123'))) (TOK_LIMIT 1))) + (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src) (TOK_TABLESPLITSAMPLE TOK_ROWCOUNT 1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION rpad 'hi' 1 '?')) (TOK_SELEXPR (TOK_FUNCTION rpad 'hi' 5 '.')) (TOK_SELEXPR (TOK_FUNCTION rpad 'hi' 6 '123'))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - src - TableScan - alias: src - Select Operator - expressions: - expr: rpad('hi', 1, '?') - type: string - expr: rpad('hi', 5, '.') - type: string - expr: rpad('hi', 6, '123') - type: string - outputColumnNames: _col0, _col1, _col2 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator - limit: 1 + limit: -1 + Processor Tree: + TableScan + alias: src + Row Limit Per Split: 1 + Select Operator + expressions: + expr: rpad('hi', 1, '?') + type: string + expr: rpad('hi', 5, '.') + type: string + expr: rpad('hi', 6, '123') + type: string + outputColumnNames: _col0, _col1, _col2 + ListSink PREHOOK: query: SELECT rpad('hi', 1, '?'), rpad('hi', 5, '.'), rpad('hi', 6, '123') -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### @@ -74,7 +63,7 @@ POSTHOOK: query: SELECT rpad('hi', 1, '?'), rpad('hi', 5, '.'), rpad('hi', 6, '123') -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_second.q.out b/ql/src/test/results/clientpositive/udf_second.q.out index c63243c..b3b401a 100644 --- a/ql/src/test/results/clientpositive/udf_second.q.out +++ b/ql/src/test/results/clientpositive/udf_second.q.out @@ -26,40 +26,29 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION second '2009-08-07 13:14:15')) (TOK_SELEXPR (TOK_FUNCTION second '13:14:15')) (TOK_SELEXPR (TOK_FUNCTION second '2009-08-07'))) (TOK_WHERE (= (TOK_TABLE_OR_COL key) 86)))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - src - TableScan - alias: src - Filter Operator - predicate: - expr: (key = 86) - type: boolean - Select Operator - expressions: - expr: second('2009-08-07 13:14:15') - type: int - expr: second('13:14:15') - type: int - expr: second('2009-08-07') - type: int - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: -1 + Processor Tree: + TableScan + alias: src + Filter Operator + predicate: + expr: (key = 86) 
+ type: boolean + Select Operator + expressions: + expr: second('2009-08-07 13:14:15') + type: int + expr: second('13:14:15') + type: int + expr: second('2009-08-07') + type: int + outputColumnNames: _col0, _col1, _col2 + ListSink PREHOOK: query: SELECT second('2009-08-07 13:14:15'), second('13:14:15'), second('2009-08-07') diff --git a/ql/src/test/results/clientpositive/udf_sign.q.out b/ql/src/test/results/clientpositive/udf_sign.q.out index c5fe12d..9392ed7 100644 --- a/ql/src/test/results/clientpositive/udf_sign.q.out +++ b/ql/src/test/results/clientpositive/udf_sign.q.out @@ -1,65 +1,54 @@ -PREHOOK: query: explain -select sign(0) FROM src LIMIT 1 +PREHOOK: query: explain +select sign(0) FROM src tablesample (1 rows) PREHOOK: type: QUERY -POSTHOOK: query: explain -select sign(0) FROM src LIMIT 1 +POSTHOOK: query: explain +select sign(0) FROM src tablesample (1 rows) POSTHOOK: type: QUERY ABSTRACT SYNTAX TREE: - (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION sign 0))) (TOK_LIMIT 1))) + (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src) (TOK_TABLESPLITSAMPLE TOK_ROWCOUNT 1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION sign 0))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - src - TableScan - alias: src - Select Operator - expressions: - expr: sign(0) - type: double - outputColumnNames: _col0 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator - limit: 1 + limit: -1 + Processor Tree: + TableScan + alias: src + Row Limit Per Split: 1 + Select Operator + expressions: + expr: sign(0) + type: double + outputColumnNames: _col0 + ListSink -PREHOOK: query: select sign(0) FROM src LIMIT 1 +PREHOOK: query: select sign(0) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: select sign(0) FROM src LIMIT 1 +POSTHOOK: query: select sign(0) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### 0.0 -PREHOOK: query: select sign(-45) FROM src LIMIT 1 +PREHOOK: query: select sign(-45) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: select sign(-45) FROM src LIMIT 1 +POSTHOOK: query: select sign(-45) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### -1.0 -PREHOOK: query: select sign(46) FROM src LIMIT 1 +PREHOOK: query: select sign(46) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: select sign(46) FROM src LIMIT 1 +POSTHOOK: query: select sign(46) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### @@ -78,67 +67,56 @@ Example: > SELECT sign(40) FROM src LIMIT 1; 1 PREHOOK: query: explain -select sign(0) FROM src LIMIT 1 +select sign(0) FROM src tablesample (1 rows) PREHOOK: type: QUERY POSTHOOK: query: explain -select sign(0) FROM src LIMIT 1 +select sign(0) FROM src tablesample 
(1 rows) POSTHOOK: type: QUERY ABSTRACT SYNTAX TREE: - (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION sign 0))) (TOK_LIMIT 1))) + (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src) (TOK_TABLESPLITSAMPLE TOK_ROWCOUNT 1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION sign 0))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - src - TableScan - alias: src - Select Operator - expressions: - expr: sign(0) - type: double - outputColumnNames: _col0 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator - limit: 1 + limit: -1 + Processor Tree: + TableScan + alias: src + Row Limit Per Split: 1 + Select Operator + expressions: + expr: sign(0) + type: double + outputColumnNames: _col0 + ListSink -PREHOOK: query: select sign(0) FROM src LIMIT 1 +PREHOOK: query: select sign(0) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: select sign(0) FROM src LIMIT 1 +POSTHOOK: query: select sign(0) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### 0.0 -PREHOOK: query: select sign(-45) FROM src LIMIT 1 +PREHOOK: query: select sign(-45) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: select sign(-45) FROM src LIMIT 1 +POSTHOOK: query: select sign(-45) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### -1.0 -PREHOOK: query: select sign(46) FROM src LIMIT 1 +PREHOOK: query: select sign(46) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: select sign(46) FROM src LIMIT 1 +POSTHOOK: query: select sign(46) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_sin.q.out b/ql/src/test/results/clientpositive/udf_sin.q.out index cee9ddd..47e7cee 100644 --- a/ql/src/test/results/clientpositive/udf_sin.q.out +++ b/ql/src/test/results/clientpositive/udf_sin.q.out @@ -12,23 +12,23 @@ Example: > SELECT sin(0) FROM src LIMIT 1; 0 PREHOOK: query: SELECT sin(null) -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### POSTHOOK: query: SELECT sin(null) -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### NULL PREHOOK: query: SELECT sin(0.98), sin(1.57), sin(-0.5) -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### POSTHOOK: query: SELECT sin(0.98), sin(1.57), sin(-0.5) -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_size.q.out b/ql/src/test/results/clientpositive/udf_size.q.out 
index ffe830d..06c78ac 100644 --- a/ql/src/test/results/clientpositive/udf_size.q.out +++ b/ql/src/test/results/clientpositive/udf_size.q.out @@ -30,43 +30,32 @@ ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src_thrift))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION size (. (TOK_TABLE_OR_COL src_thrift) lint))) (TOK_SELEXPR (TOK_FUNCTION size (. (TOK_TABLE_OR_COL src_thrift) lintstring))) (TOK_SELEXPR (TOK_FUNCTION size (. (TOK_TABLE_OR_COL src_thrift) mstringstring))) (TOK_SELEXPR (TOK_FUNCTION size TOK_NULL))) (TOK_WHERE (AND (TOK_FUNCTION TOK_ISNOTNULL (. (TOK_TABLE_OR_COL src_thrift) lint)) (NOT (TOK_FUNCTION TOK_ISNULL (. (TOK_TABLE_OR_COL src_thrift) mstringstring))))) (TOK_LIMIT 1))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - src_thrift - TableScan - alias: src_thrift - Filter Operator - predicate: - expr: (lint is not null and (not mstringstring is null)) - type: boolean - Select Operator - expressions: - expr: size(lint) - type: int - expr: size(lintstring) - type: int - expr: size(mstringstring) - type: int - expr: size(null) - type: int - outputColumnNames: _col0, _col1, _col2, _col3 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator limit: 1 + Processor Tree: + TableScan + alias: src_thrift + Filter Operator + predicate: + expr: (lint is not null and (not mstringstring is null)) + type: boolean + Select Operator + expressions: + expr: size(lint) + type: int + expr: size(lintstring) + type: int + expr: size(mstringstring) + type: int + expr: size(null) + type: int + outputColumnNames: _col0, _col1, _col2, _col3 + Limit + ListSink PREHOOK: query: FROM src_thrift diff --git a/ql/src/test/results/clientpositive/udf_sort_array.q.out b/ql/src/test/results/clientpositive/udf_sort_array.q.out index d8316bd..8377859 100644 --- a/ql/src/test/results/clientpositive/udf_sort_array.q.out +++ b/ql/src/test/results/clientpositive/udf_sort_array.q.out @@ -21,81 +21,70 @@ Example: 'a', 'b', 'c', 'd' PREHOOK: query: -- Evaluate function against STRING valued keys EXPLAIN -SELECT sort_array(array("b", "d", "c", "a")) FROM src LIMIT 1 +SELECT sort_array(array("b", "d", "c", "a")) FROM src tablesample (1 rows) PREHOOK: type: QUERY POSTHOOK: query: -- Evaluate function against STRING valued keys EXPLAIN -SELECT sort_array(array("b", "d", "c", "a")) FROM src LIMIT 1 +SELECT sort_array(array("b", "d", "c", "a")) FROM src tablesample (1 rows) POSTHOOK: type: QUERY ABSTRACT SYNTAX TREE: - (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION sort_array (TOK_FUNCTION array "b" "d" "c" "a")))) (TOK_LIMIT 1))) + (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src) (TOK_TABLESPLITSAMPLE TOK_ROWCOUNT 1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION sort_array (TOK_FUNCTION array "b" "d" "c" "a")))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - src - TableScan - alias: src - Select Operator - expressions: - expr: sort_array(array('b','d','c','a')) - type: 
array - outputColumnNames: _col0 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator - limit: 1 + limit: -1 + Processor Tree: + TableScan + alias: src + Row Limit Per Split: 1 + Select Operator + expressions: + expr: sort_array(array('b','d','c','a')) + type: array + outputColumnNames: _col0 + ListSink -PREHOOK: query: SELECT sort_array(array("f", "a", "g", "c", "b", "d", "e")) FROM src LIMIT 1 +PREHOOK: query: SELECT sort_array(array("f", "a", "g", "c", "b", "d", "e")) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT sort_array(array("f", "a", "g", "c", "b", "d", "e")) FROM src LIMIT 1 +POSTHOOK: query: SELECT sort_array(array("f", "a", "g", "c", "b", "d", "e")) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### ["a","b","c","d","e","f","g"] -PREHOOK: query: SELECT sort_array(sort_array(array("hadoop distributed file system", "enterprise databases", "hadoop map-reduce"))) FROM src LIMIT 1 +PREHOOK: query: SELECT sort_array(sort_array(array("hadoop distributed file system", "enterprise databases", "hadoop map-reduce"))) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT sort_array(sort_array(array("hadoop distributed file system", "enterprise databases", "hadoop map-reduce"))) FROM src LIMIT 1 +POSTHOOK: query: SELECT sort_array(sort_array(array("hadoop distributed file system", "enterprise databases", "hadoop map-reduce"))) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### ["enterprise databases","hadoop distributed file system","hadoop map-reduce"] PREHOOK: query: -- Evaluate function against INT valued keys -SELECT sort_array(array(2, 9, 7, 3, 5, 4, 1, 6, 8)) FROM src LIMIT 1 +SELECT sort_array(array(2, 9, 7, 3, 5, 4, 1, 6, 8)) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### POSTHOOK: query: -- Evaluate function against INT valued keys -SELECT sort_array(array(2, 9, 7, 3, 5, 4, 1, 6, 8)) FROM src LIMIT 1 +SELECT sort_array(array(2, 9, 7, 3, 5, 4, 1, 6, 8)) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### [1,2,3,4,5,6,7,8,9] PREHOOK: query: -- Evaluate function against FLOAT valued keys -SELECT sort_array(sort_array(array(2.333, 9, 1.325, 2.003, 0.777, -3.445, 1))) FROM src LIMIT 1 +SELECT sort_array(sort_array(array(2.333, 9, 1.325, 2.003, 0.777, -3.445, 1))) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### POSTHOOK: query: -- Evaluate function against FLOAT valued keys -SELECT sort_array(sort_array(array(2.333, 9, 1.325, 2.003, 0.777, -3.445, 1))) FROM src LIMIT 1 +SELECT sort_array(sort_array(array(2.333, 9, 1.325, 2.003, 0.777, -3.445, 1))) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_space.q.out b/ql/src/test/results/clientpositive/udf_space.q.out index 9e206a3..bcfdedc 100644 --- 
a/ql/src/test/results/clientpositive/udf_space.q.out +++ b/ql/src/test/results/clientpositive/udf_space.q.out @@ -17,7 +17,7 @@ PREHOOK: query: EXPLAIN SELECT space(1), space(-1), space(-100) -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY POSTHOOK: query: EXPLAIN SELECT space(10), @@ -25,47 +25,36 @@ POSTHOOK: query: EXPLAIN SELECT space(1), space(-1), space(-100) -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY ABSTRACT SYNTAX TREE: - (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION space 10)) (TOK_SELEXPR (TOK_FUNCTION space 0)) (TOK_SELEXPR (TOK_FUNCTION space 1)) (TOK_SELEXPR (TOK_FUNCTION space (- 1))) (TOK_SELEXPR (TOK_FUNCTION space (- 100)))) (TOK_LIMIT 1))) + (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src) (TOK_TABLESPLITSAMPLE TOK_ROWCOUNT 1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION space 10)) (TOK_SELEXPR (TOK_FUNCTION space 0)) (TOK_SELEXPR (TOK_FUNCTION space 1)) (TOK_SELEXPR (TOK_FUNCTION space (- 1))) (TOK_SELEXPR (TOK_FUNCTION space (- 100)))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - src - TableScan - alias: src - Select Operator - expressions: - expr: space(10) - type: string - expr: space(0) - type: string - expr: space(1) - type: string - expr: space((- 1)) - type: string - expr: space((- 100)) - type: string - outputColumnNames: _col0, _col1, _col2, _col3, _col4 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator - limit: 1 + limit: -1 + Processor Tree: + TableScan + alias: src + Row Limit Per Split: 1 + Select Operator + expressions: + expr: space(10) + type: string + expr: space(0) + type: string + expr: space(1) + type: string + expr: space((- 1)) + type: string + expr: space((- 100)) + type: string + outputColumnNames: _col0, _col1, _col2, _col3, _col4 + ListSink PREHOOK: query: SELECT @@ -74,7 +63,7 @@ PREHOOK: query: SELECT length(space(1)), length(space(-1)), length(space(-100)) -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### @@ -84,7 +73,7 @@ POSTHOOK: query: SELECT length(space(1)), length(space(-1)), length(space(-100)) -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### @@ -95,7 +84,7 @@ PREHOOK: query: SELECT space(1), space(-1), space(-100) -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### @@ -105,7 +94,7 @@ POSTHOOK: query: SELECT space(1), space(-1), space(-100) -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_split.q.out b/ql/src/test/results/clientpositive/udf_split.q.out index c8adca6..f927b64 100644 --- a/ql/src/test/results/clientpositive/udf_split.q.out +++ b/ql/src/test/results/clientpositive/udf_split.q.out @@ -16,52 +16,41 @@ PREHOOK: query: EXPLAIN SELECT split('oneAtwoBthreeC', '[ABC]'), split('', '.'), 
split(50401020, 0) -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY POSTHOOK: query: EXPLAIN SELECT split('a b c', ' '), split('oneAtwoBthreeC', '[ABC]'), split('', '.'), split(50401020, 0) -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY ABSTRACT SYNTAX TREE: - (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION split 'a b c' ' ')) (TOK_SELEXPR (TOK_FUNCTION split 'oneAtwoBthreeC' '[ABC]')) (TOK_SELEXPR (TOK_FUNCTION split '' '.')) (TOK_SELEXPR (TOK_FUNCTION split 50401020 0))) (TOK_LIMIT 1))) + (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src) (TOK_TABLESPLITSAMPLE TOK_ROWCOUNT 1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION split 'a b c' ' ')) (TOK_SELEXPR (TOK_FUNCTION split 'oneAtwoBthreeC' '[ABC]')) (TOK_SELEXPR (TOK_FUNCTION split '' '.')) (TOK_SELEXPR (TOK_FUNCTION split 50401020 0))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - src - TableScan - alias: src - Select Operator - expressions: - expr: split('a b c', ' ') - type: array - expr: split('oneAtwoBthreeC', '[ABC]') - type: array - expr: split('', '.') - type: array - expr: split(50401020, 0) - type: array - outputColumnNames: _col0, _col1, _col2, _col3 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator - limit: 1 + limit: -1 + Processor Tree: + TableScan + alias: src + Row Limit Per Split: 1 + Select Operator + expressions: + expr: split('a b c', ' ') + type: array + expr: split('oneAtwoBthreeC', '[ABC]') + type: array + expr: split('', '.') + type: array + expr: split(50401020, 0) + type: array + outputColumnNames: _col0, _col1, _col2, _col3 + ListSink PREHOOK: query: SELECT @@ -69,7 +58,7 @@ PREHOOK: query: SELECT split('oneAtwoBthreeC', '[ABC]'), split('', '.'), split(50401020, 0) -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### @@ -78,8 +67,8 @@ POSTHOOK: query: SELECT split('oneAtwoBthreeC', '[ABC]'), split('', '.'), split(50401020, 0) -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### -["a","b","c"] ["one","two","three",""] [] ["5","4","1","2",""] +["a","b","c"] ["one","two","three",""] [""] ["5","4","1","2",""] diff --git a/ql/src/test/results/clientpositive/udf_struct.q.out b/ql/src/test/results/clientpositive/udf_struct.q.out index b2ee08f..4d4026b 100644 --- a/ql/src/test/results/clientpositive/udf_struct.q.out +++ b/ql/src/test/results/clientpositive/udf_struct.q.out @@ -10,58 +10,47 @@ POSTHOOK: type: DESCFUNCTION struct(col1, col2, col3, ...) 
- Creates a struct with the given field values PREHOOK: query: EXPLAIN SELECT struct(1), struct(1, "a"), struct(1, "b", 1.5).col1, struct(1, struct("a", 1.5)).col2.col1 -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY POSTHOOK: query: EXPLAIN SELECT struct(1), struct(1, "a"), struct(1, "b", 1.5).col1, struct(1, struct("a", 1.5)).col2.col1 -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY ABSTRACT SYNTAX TREE: - (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION struct 1)) (TOK_SELEXPR (TOK_FUNCTION struct 1 "a")) (TOK_SELEXPR (. (TOK_FUNCTION struct 1 "b" 1.5) col1)) (TOK_SELEXPR (. (. (TOK_FUNCTION struct 1 (TOK_FUNCTION struct "a" 1.5)) col2) col1))) (TOK_LIMIT 1))) + (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src) (TOK_TABLESPLITSAMPLE TOK_ROWCOUNT 1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION struct 1)) (TOK_SELEXPR (TOK_FUNCTION struct 1 "a")) (TOK_SELEXPR (. (TOK_FUNCTION struct 1 "b" 1.5) col1)) (TOK_SELEXPR (. (. (TOK_FUNCTION struct 1 (TOK_FUNCTION struct "a" 1.5)) col2) col1))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - src - TableScan - alias: src - Select Operator - expressions: - expr: struct(1) - type: struct - expr: struct(1,'a') - type: struct - expr: struct(1,'b',1.5).col1 - type: int - expr: struct(1,struct('a',1.5)).col2.col1 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator - limit: 1 + limit: -1 + Processor Tree: + TableScan + alias: src + Row Limit Per Split: 1 + Select Operator + expressions: + expr: struct(1) + type: struct + expr: struct(1,'a') + type: struct + expr: struct(1,'b',1.5).col1 + type: int + expr: struct(1,struct('a',1.5)).col2.col1 + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + ListSink PREHOOK: query: SELECT struct(1), struct(1, "a"), struct(1, "b", 1.5).col1, struct(1, struct("a", 1.5)).col2.col1 -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### POSTHOOK: query: SELECT struct(1), struct(1, "a"), struct(1, "b", 1.5).col1, struct(1, struct("a", 1.5)).col2.col1 -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_substr.q.out b/ql/src/test/results/clientpositive/udf_substr.q.out index 712725e..ab64289 100644 --- a/ql/src/test/results/clientpositive/udf_substr.q.out +++ b/ql/src/test/results/clientpositive/udf_substr.q.out @@ -21,7 +21,7 @@ PREHOOK: query: SELECT substr(null, 1), substr(null, 1, 1), substr('ABC', null), substr('ABC', null, 1), substr('ABC', 1, null) -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### @@ -29,7 +29,7 @@ POSTHOOK: query: SELECT substr(null, 1), substr(null, 1, 1), substr('ABC', null), substr('ABC', null, 1), substr('ABC', 1, null) -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: 
Input: default@src #### A masked pattern was here #### @@ -41,7 +41,7 @@ PREHOOK: query: SELECT substr('ABC', 100), substr('ABC', 100, 100), substr('ABC', -100), substr('ABC', -100, 100), substr('ABC', 2147483647), substr('ABC', 2147483647, 2147483647) -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### @@ -52,7 +52,7 @@ POSTHOOK: query: SELECT substr('ABC', 100), substr('ABC', 100, 100), substr('ABC', -100), substr('ABC', -100, 100), substr('ABC', 2147483647), substr('ABC', 2147483647, 2147483647) -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### @@ -63,7 +63,7 @@ PREHOOK: query: SELECT substr('ABC', 0), substr('ABC', 1), substr('ABC', 2), substr('ABC', 3), substr('ABC', 1, 2147483647), substr('ABC', 2, 2147483647), substr('A', 0), substr('A', 1), substr('A', -1) -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### @@ -73,7 +73,7 @@ POSTHOOK: query: SELECT substr('ABC', 0), substr('ABC', 1), substr('ABC', 2), substr('ABC', 3), substr('ABC', 1, 2147483647), substr('ABC', 2, 2147483647), substr('A', 0), substr('A', 1), substr('A', -1) -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### @@ -84,7 +84,7 @@ PREHOOK: query: SELECT substr('ABC', 2, 1), substr('ABC', 2, 2), substr('ABC', 2, 3), substr('ABC', 2, 4), substr('ABC', 3, 1), substr('ABC', 3, 2), substr('ABC', 3, 3), substr('ABC', 3, 4), substr('ABC', 4, 1) -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### @@ -94,7 +94,7 @@ POSTHOOK: query: SELECT substr('ABC', 2, 1), substr('ABC', 2, 2), substr('ABC', 2, 3), substr('ABC', 2, 4), substr('ABC', 3, 1), substr('ABC', 3, 2), substr('ABC', 3, 3), substr('ABC', 3, 4), substr('ABC', 4, 1) -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### @@ -104,7 +104,7 @@ PREHOOK: query: SELECT substr('ABC', -2, 1), substr('ABC', -2, 2), substr('ABC', -2, 3), substr('ABC', -2, 4), substr('ABC', -3, 1), substr('ABC', -3, 2), substr('ABC', -3, 3), substr('ABC', -3, 4), substr('ABC', -4, 1) -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### @@ -113,7 +113,7 @@ POSTHOOK: query: SELECT substr('ABC', -2, 1), substr('ABC', -2, 2), substr('ABC', -2, 3), substr('ABC', -2, 4), substr('ABC', -3, 1), substr('ABC', -3, 2), substr('ABC', -3, 3), substr('ABC', -3, 4), substr('ABC', -4, 1) -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### @@ -125,7 +125,7 @@ SELECT substring('ABC', 0), substring('ABC', 1), substring('ABC', 2), substring('ABC', 3), substring('ABC', 1, 2147483647), substring('ABC', 2, 2147483647), substring('A', 0), substring('A', 1), substring('A', -1) -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### @@ -136,7 +136,7 @@ SELECT substring('ABC', 0), substring('ABC', 1), substring('ABC', 2), substring('ABC', 3), substring('ABC', 1, 2147483647), substring('ABC', 2, 2147483647), substring('A', 0), substring('A', 1), substring('A', -1) -FROM src LIMIT 1 +FROM src 
tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### @@ -156,7 +156,7 @@ SELECT substr(ABC, -3, 1), substr(ABC, -3, 2), substr(ABC, -3, 3), substr(ABC, -3, 4), substr(ABC, -4, 1) FROM ( - select CAST(concat(substr(value, 1, 0), 'ABC') as BINARY) as ABC from src LIMIT 1 + select CAST(concat(substr(value, 1, 0), 'ABC') as BINARY) as ABC from src tablesample (1 rows) ) X PREHOOK: type: QUERY PREHOOK: Input: default@src @@ -176,7 +176,7 @@ SELECT substr(ABC, -3, 1), substr(ABC, -3, 2), substr(ABC, -3, 3), substr(ABC, -3, 4), substr(ABC, -4, 1) FROM ( - select CAST(concat(substr(value, 1, 0), 'ABC') as BINARY) as ABC from src LIMIT 1 + select CAST(concat(substr(value, 1, 0), 'ABC') as BINARY) as ABC from src tablesample (1 rows) ) X POSTHOOK: type: QUERY POSTHOOK: Input: default@src @@ -188,7 +188,7 @@ SELECT substr("abc 玩", 5), substr("abc 玩玩玩 abc", 5), substr("abc 玩玩玩 abc", 5, 3) -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### @@ -198,7 +198,7 @@ SELECT substr("abc 玩", 5), substr("abc 玩玩玩 abc", 5), substr("abc 玩玩玩 abc", 5, 3) -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_tan.q.out b/ql/src/test/results/clientpositive/udf_tan.q.out index 96ab03b..42fef20 100644 --- a/ql/src/test/results/clientpositive/udf_tan.q.out +++ b/ql/src/test/results/clientpositive/udf_tan.q.out @@ -12,23 +12,23 @@ Example: > SELECT tan(0) FROM src LIMIT 1; 1 PREHOOK: query: SELECT tan(null) -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### POSTHOOK: query: SELECT tan(null) -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### NULL PREHOOK: query: SELECT tan(1), tan(6), tan(-1.0) -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### POSTHOOK: query: SELECT tan(1), tan(6), tan(-1.0) -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### @@ -47,23 +47,23 @@ Example: > SELECT tan(0) FROM src LIMIT 1; 1 PREHOOK: query: SELECT tan(null) -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### POSTHOOK: query: SELECT tan(null) -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### NULL PREHOOK: query: SELECT tan(1), tan(6), tan(-1.0) -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### POSTHOOK: query: SELECT tan(1), tan(6), tan(-1.0) -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_testlength.q.out b/ql/src/test/results/clientpositive/udf_testlength.q.out index 8fb034c..8b000a9 100644 --- a/ql/src/test/results/clientpositive/udf_testlength.q.out +++ b/ql/src/test/results/clientpositive/udf_testlength.q.out @@ -18,29 +18,14 @@ PREHOOK: query: CREATE TEMPORARY FUNCTION testlength AS 'org.apache.hadoop.hive. 
PREHOOK: type: CREATEFUNCTION POSTHOOK: query: CREATE TEMPORARY FUNCTION testlength AS 'org.apache.hadoop.hive.ql.udf.UDFTestLength' POSTHOOK: type: CREATEFUNCTION -PREHOOK: query: CREATE TABLE dest1(len INT) -PREHOOK: type: CREATETABLE -POSTHOOK: query: CREATE TABLE dest1(len INT) -POSTHOOK: type: CREATETABLE -POSTHOOK: Output: default@dest1 -PREHOOK: query: FROM src INSERT OVERWRITE TABLE dest1 SELECT testlength(src.value) +PREHOOK: query: SELECT testlength(src.value) FROM src PREHOOK: type: QUERY PREHOOK: Input: default@src -PREHOOK: Output: default@dest1 -POSTHOOK: query: FROM src INSERT OVERWRITE TABLE dest1 SELECT testlength(src.value) -POSTHOOK: type: QUERY -POSTHOOK: Input: default@src -POSTHOOK: Output: default@dest1 -POSTHOOK: Lineage: dest1.len EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ] -PREHOOK: query: SELECT dest1.* FROM dest1 -PREHOOK: type: QUERY -PREHOOK: Input: default@dest1 #### A masked pattern was here #### -POSTHOOK: query: SELECT dest1.* FROM dest1 +POSTHOOK: query: SELECT testlength(src.value) FROM src POSTHOOK: type: QUERY -POSTHOOK: Input: default@dest1 +POSTHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: Lineage: dest1.len EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ] 7 6 7 @@ -545,4 +530,3 @@ PREHOOK: query: DROP TEMPORARY FUNCTION testlength PREHOOK: type: DROPFUNCTION POSTHOOK: query: DROP TEMPORARY FUNCTION testlength POSTHOOK: type: DROPFUNCTION -POSTHOOK: Lineage: dest1.len EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ] diff --git a/ql/src/test/results/clientpositive/udf_testlength2.q.out b/ql/src/test/results/clientpositive/udf_testlength2.q.out index 9913d24..c9033e1 100644 --- a/ql/src/test/results/clientpositive/udf_testlength2.q.out +++ b/ql/src/test/results/clientpositive/udf_testlength2.q.out @@ -18,29 +18,14 @@ PREHOOK: query: CREATE TEMPORARY FUNCTION testlength2 AS 'org.apache.hadoop.hive PREHOOK: type: CREATEFUNCTION POSTHOOK: query: CREATE TEMPORARY FUNCTION testlength2 AS 'org.apache.hadoop.hive.ql.udf.UDFTestLength2' POSTHOOK: type: CREATEFUNCTION -PREHOOK: query: CREATE TABLE dest1(len INT) -PREHOOK: type: CREATETABLE -POSTHOOK: query: CREATE TABLE dest1(len INT) -POSTHOOK: type: CREATETABLE -POSTHOOK: Output: default@dest1 -PREHOOK: query: FROM src INSERT OVERWRITE TABLE dest1 SELECT testlength2(src.value) +PREHOOK: query: SELECT testlength2(src.value) FROM src PREHOOK: type: QUERY PREHOOK: Input: default@src -PREHOOK: Output: default@dest1 -POSTHOOK: query: FROM src INSERT OVERWRITE TABLE dest1 SELECT testlength2(src.value) -POSTHOOK: type: QUERY -POSTHOOK: Input: default@src -POSTHOOK: Output: default@dest1 -POSTHOOK: Lineage: dest1.len EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ] -PREHOOK: query: SELECT dest1.* FROM dest1 -PREHOOK: type: QUERY -PREHOOK: Input: default@dest1 #### A masked pattern was here #### -POSTHOOK: query: SELECT dest1.* FROM dest1 +POSTHOOK: query: SELECT testlength2(src.value) FROM src POSTHOOK: type: QUERY -POSTHOOK: Input: default@dest1 +POSTHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: Lineage: dest1.len EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ] 7 6 7 @@ -545,4 +530,3 @@ PREHOOK: query: DROP TEMPORARY FUNCTION testlength2 PREHOOK: type: DROPFUNCTION POSTHOOK: query: DROP TEMPORARY FUNCTION testlength2 POSTHOOK: type: DROPFUNCTION -POSTHOOK: Lineage: dest1.len EXPRESSION 
[(src)src.FieldSchema(name:value, type:string, comment:default), ] diff --git a/ql/src/test/results/clientpositive/udf_to_boolean.q.out b/ql/src/test/results/clientpositive/udf_to_boolean.q.out index 61a8a1d..d50d8c2 100644 --- a/ql/src/test/results/clientpositive/udf_to_boolean.q.out +++ b/ql/src/test/results/clientpositive/udf_to_boolean.q.out @@ -1,261 +1,261 @@ -PREHOOK: query: -- 'true' cases: +PREHOOK: query: -- 'true' cases: -SELECT CAST(CAST(1 AS TINYINT) AS BOOLEAN) FROM src LIMIT 1 +SELECT CAST(CAST(1 AS TINYINT) AS BOOLEAN) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: -- 'true' cases: +POSTHOOK: query: -- 'true' cases: -SELECT CAST(CAST(1 AS TINYINT) AS BOOLEAN) FROM src LIMIT 1 +SELECT CAST(CAST(1 AS TINYINT) AS BOOLEAN) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### true -PREHOOK: query: SELECT CAST(CAST(2 AS SMALLINT) AS BOOLEAN) FROM src LIMIT 1 +PREHOOK: query: SELECT CAST(CAST(2 AS SMALLINT) AS BOOLEAN) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT CAST(CAST(2 AS SMALLINT) AS BOOLEAN) FROM src LIMIT 1 +POSTHOOK: query: SELECT CAST(CAST(2 AS SMALLINT) AS BOOLEAN) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### true -PREHOOK: query: SELECT CAST(CAST(-4 AS INT) AS BOOLEAN) FROM src LIMIT 1 +PREHOOK: query: SELECT CAST(CAST(-4 AS INT) AS BOOLEAN) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT CAST(CAST(-4 AS INT) AS BOOLEAN) FROM src LIMIT 1 +POSTHOOK: query: SELECT CAST(CAST(-4 AS INT) AS BOOLEAN) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### true -PREHOOK: query: SELECT CAST(CAST(-444 AS BIGINT) AS BOOLEAN) FROM src LIMIT 1 +PREHOOK: query: SELECT CAST(CAST(-444 AS BIGINT) AS BOOLEAN) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT CAST(CAST(-444 AS BIGINT) AS BOOLEAN) FROM src LIMIT 1 +POSTHOOK: query: SELECT CAST(CAST(-444 AS BIGINT) AS BOOLEAN) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### true -PREHOOK: query: SELECT CAST(CAST(7.0 AS FLOAT) AS BOOLEAN) FROM src LIMIT 1 +PREHOOK: query: SELECT CAST(CAST(7.0 AS FLOAT) AS BOOLEAN) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT CAST(CAST(7.0 AS FLOAT) AS BOOLEAN) FROM src LIMIT 1 +POSTHOOK: query: SELECT CAST(CAST(7.0 AS FLOAT) AS BOOLEAN) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### true -PREHOOK: query: SELECT CAST(CAST(-8.0 AS DOUBLE) AS BOOLEAN) FROM src LIMIT 1 +PREHOOK: query: SELECT CAST(CAST(-8.0 AS DOUBLE) AS BOOLEAN) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT CAST(CAST(-8.0 AS DOUBLE) AS BOOLEAN) FROM src LIMIT 1 +POSTHOOK: query: SELECT CAST(CAST(-8.0 AS DOUBLE) AS BOOLEAN) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### true -PREHOOK: query: SELECT 
CAST(CAST(-99.0 AS DECIMAL) AS BOOLEAN) FROM src LIMIT 1 +PREHOOK: query: SELECT CAST(CAST(-99.0 AS DECIMAL) AS BOOLEAN) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT CAST(CAST(-99.0 AS DECIMAL) AS BOOLEAN) FROM src LIMIT 1 +POSTHOOK: query: SELECT CAST(CAST(-99.0 AS DECIMAL) AS BOOLEAN) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### true -PREHOOK: query: SELECT CAST(CAST('Foo' AS STRING) AS BOOLEAN) FROM src LIMIT 1 +PREHOOK: query: SELECT CAST(CAST('Foo' AS STRING) AS BOOLEAN) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT CAST(CAST('Foo' AS STRING) AS BOOLEAN) FROM src LIMIT 1 +POSTHOOK: query: SELECT CAST(CAST('Foo' AS STRING) AS BOOLEAN) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### true -PREHOOK: query: SELECT CAST(CAST('2011-05-06 07:08:09' as timestamp) AS BOOLEAN) FROM src LIMIT 1 +PREHOOK: query: SELECT CAST(CAST('2011-05-06 07:08:09' as timestamp) AS BOOLEAN) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT CAST(CAST('2011-05-06 07:08:09' as timestamp) AS BOOLEAN) FROM src LIMIT 1 +POSTHOOK: query: SELECT CAST(CAST('2011-05-06 07:08:09' as timestamp) AS BOOLEAN) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### true PREHOOK: query: -- 'false' cases: -SELECT CAST(CAST(0 AS TINYINT) AS BOOLEAN) FROM src LIMIT 1 +SELECT CAST(CAST(0 AS TINYINT) AS BOOLEAN) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### POSTHOOK: query: -- 'false' cases: -SELECT CAST(CAST(0 AS TINYINT) AS BOOLEAN) FROM src LIMIT 1 +SELECT CAST(CAST(0 AS TINYINT) AS BOOLEAN) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### false -PREHOOK: query: SELECT CAST(CAST(0 AS SMALLINT) AS BOOLEAN) FROM src LIMIT 1 +PREHOOK: query: SELECT CAST(CAST(0 AS SMALLINT) AS BOOLEAN) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT CAST(CAST(0 AS SMALLINT) AS BOOLEAN) FROM src LIMIT 1 +POSTHOOK: query: SELECT CAST(CAST(0 AS SMALLINT) AS BOOLEAN) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### false -PREHOOK: query: SELECT CAST(CAST(0 AS INT) AS BOOLEAN) FROM src LIMIT 1 +PREHOOK: query: SELECT CAST(CAST(0 AS INT) AS BOOLEAN) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT CAST(CAST(0 AS INT) AS BOOLEAN) FROM src LIMIT 1 +POSTHOOK: query: SELECT CAST(CAST(0 AS INT) AS BOOLEAN) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### false -PREHOOK: query: SELECT CAST(CAST(0 AS BIGINT) AS BOOLEAN) FROM src LIMIT 1 +PREHOOK: query: SELECT CAST(CAST(0 AS BIGINT) AS BOOLEAN) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT CAST(CAST(0 AS BIGINT) AS BOOLEAN) FROM src LIMIT 1 +POSTHOOK: query: SELECT CAST(CAST(0 AS BIGINT) AS BOOLEAN) 
FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### false -PREHOOK: query: SELECT CAST(CAST(0.0 AS FLOAT) AS BOOLEAN) FROM src LIMIT 1 +PREHOOK: query: SELECT CAST(CAST(0.0 AS FLOAT) AS BOOLEAN) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT CAST(CAST(0.0 AS FLOAT) AS BOOLEAN) FROM src LIMIT 1 +POSTHOOK: query: SELECT CAST(CAST(0.0 AS FLOAT) AS BOOLEAN) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### false -PREHOOK: query: SELECT CAST(CAST(0.0 AS DOUBLE) AS BOOLEAN) FROM src LIMIT 1 +PREHOOK: query: SELECT CAST(CAST(0.0 AS DOUBLE) AS BOOLEAN) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT CAST(CAST(0.0 AS DOUBLE) AS BOOLEAN) FROM src LIMIT 1 +POSTHOOK: query: SELECT CAST(CAST(0.0 AS DOUBLE) AS BOOLEAN) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### false -PREHOOK: query: SELECT CAST(CAST(0.0 AS DECIMAL) AS BOOLEAN) FROM src LIMIT 1 +PREHOOK: query: SELECT CAST(CAST(0.0 AS DECIMAL) AS BOOLEAN) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT CAST(CAST(0.0 AS DECIMAL) AS BOOLEAN) FROM src LIMIT 1 +POSTHOOK: query: SELECT CAST(CAST(0.0 AS DECIMAL) AS BOOLEAN) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### false -PREHOOK: query: SELECT CAST(CAST('' AS STRING) AS BOOLEAN) FROM src LIMIT 1 +PREHOOK: query: SELECT CAST(CAST('' AS STRING) AS BOOLEAN) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT CAST(CAST('' AS STRING) AS BOOLEAN) FROM src LIMIT 1 +POSTHOOK: query: SELECT CAST(CAST('' AS STRING) AS BOOLEAN) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### false -PREHOOK: query: SELECT CAST(CAST(0 as timestamp) AS BOOLEAN) FROM src LIMIT 1 +PREHOOK: query: SELECT CAST(CAST(0 as timestamp) AS BOOLEAN) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT CAST(CAST(0 as timestamp) AS BOOLEAN) FROM src LIMIT 1 +POSTHOOK: query: SELECT CAST(CAST(0 as timestamp) AS BOOLEAN) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### false PREHOOK: query: -- 'NULL' cases: -SELECT CAST(NULL AS BOOLEAN) FROM src LIMIT 1 +SELECT CAST(NULL AS BOOLEAN) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### POSTHOOK: query: -- 'NULL' cases: -SELECT CAST(NULL AS BOOLEAN) FROM src LIMIT 1 +SELECT CAST(NULL AS BOOLEAN) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### NULL -PREHOOK: query: SELECT CAST(CAST(NULL AS TINYINT) AS BOOLEAN) FROM src LIMIT 1 +PREHOOK: query: SELECT CAST(CAST(NULL AS TINYINT) AS BOOLEAN) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT CAST(CAST(NULL AS TINYINT) AS BOOLEAN) FROM src LIMIT 1 +POSTHOOK: query: SELECT CAST(CAST(NULL AS 
TINYINT) AS BOOLEAN) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### NULL -PREHOOK: query: SELECT CAST(CAST(NULL AS SMALLINT) AS BOOLEAN) FROM src LIMIT 1 +PREHOOK: query: SELECT CAST(CAST(NULL AS SMALLINT) AS BOOLEAN) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT CAST(CAST(NULL AS SMALLINT) AS BOOLEAN) FROM src LIMIT 1 +POSTHOOK: query: SELECT CAST(CAST(NULL AS SMALLINT) AS BOOLEAN) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### NULL -PREHOOK: query: SELECT CAST(CAST(NULL AS INT) AS BOOLEAN) FROM src LIMIT 1 +PREHOOK: query: SELECT CAST(CAST(NULL AS INT) AS BOOLEAN) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT CAST(CAST(NULL AS INT) AS BOOLEAN) FROM src LIMIT 1 +POSTHOOK: query: SELECT CAST(CAST(NULL AS INT) AS BOOLEAN) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### NULL -PREHOOK: query: SELECT CAST(CAST(NULL AS BIGINT) AS BOOLEAN) FROM src LIMIT 1 +PREHOOK: query: SELECT CAST(CAST(NULL AS BIGINT) AS BOOLEAN) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT CAST(CAST(NULL AS BIGINT) AS BOOLEAN) FROM src LIMIT 1 +POSTHOOK: query: SELECT CAST(CAST(NULL AS BIGINT) AS BOOLEAN) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### NULL -PREHOOK: query: SELECT CAST(CAST(NULL AS FLOAT) AS BOOLEAN) FROM src LIMIT 1 +PREHOOK: query: SELECT CAST(CAST(NULL AS FLOAT) AS BOOLEAN) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT CAST(CAST(NULL AS FLOAT) AS BOOLEAN) FROM src LIMIT 1 +POSTHOOK: query: SELECT CAST(CAST(NULL AS FLOAT) AS BOOLEAN) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### NULL -PREHOOK: query: SELECT CAST(CAST(NULL AS DOUBLE) AS BOOLEAN) FROM src LIMIT 1 +PREHOOK: query: SELECT CAST(CAST(NULL AS DOUBLE) AS BOOLEAN) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT CAST(CAST(NULL AS DOUBLE) AS BOOLEAN) FROM src LIMIT 1 +POSTHOOK: query: SELECT CAST(CAST(NULL AS DOUBLE) AS BOOLEAN) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### NULL -PREHOOK: query: SELECT CAST(CAST(NULL AS DECIMAL) AS BOOLEAN) FROM src LIMIT 1 +PREHOOK: query: SELECT CAST(CAST(NULL AS DECIMAL) AS BOOLEAN) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT CAST(CAST(NULL AS DECIMAL) AS BOOLEAN) FROM src LIMIT 1 +POSTHOOK: query: SELECT CAST(CAST(NULL AS DECIMAL) AS BOOLEAN) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### NULL -PREHOOK: query: SELECT CAST(CAST(NULL AS STRING) AS BOOLEAN) FROM src LIMIT 1 +PREHOOK: query: SELECT CAST(CAST(NULL AS STRING) AS BOOLEAN) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT 
CAST(CAST(NULL AS STRING) AS BOOLEAN) FROM src LIMIT 1 +POSTHOOK: query: SELECT CAST(CAST(NULL AS STRING) AS BOOLEAN) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### NULL -PREHOOK: query: SELECT CAST(CAST(NULL as timestamp) AS BOOLEAN) FROM src LIMIT 1 +PREHOOK: query: SELECT CAST(CAST(NULL as timestamp) AS BOOLEAN) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT CAST(CAST(NULL as timestamp) AS BOOLEAN) FROM src LIMIT 1 +POSTHOOK: query: SELECT CAST(CAST(NULL as timestamp) AS BOOLEAN) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_to_byte.q.out b/ql/src/test/results/clientpositive/udf_to_byte.q.out index aac14e6..642584b 100644 --- a/ql/src/test/results/clientpositive/udf_to_byte.q.out +++ b/ql/src/test/results/clientpositive/udf_to_byte.q.out @@ -1,82 +1,82 @@ -PREHOOK: query: -- Conversion of main primitive types to Byte type: -SELECT CAST(NULL AS TINYINT) FROM src LIMIT 1 +PREHOOK: query: -- Conversion of main primitive types to Byte type: +SELECT CAST(NULL AS TINYINT) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: -- Conversion of main primitive types to Byte type: -SELECT CAST(NULL AS TINYINT) FROM src LIMIT 1 +POSTHOOK: query: -- Conversion of main primitive types to Byte type: +SELECT CAST(NULL AS TINYINT) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### NULL -PREHOOK: query: SELECT CAST(TRUE AS TINYINT) FROM src LIMIT 1 +PREHOOK: query: SELECT CAST(TRUE AS TINYINT) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT CAST(TRUE AS TINYINT) FROM src LIMIT 1 +POSTHOOK: query: SELECT CAST(TRUE AS TINYINT) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### 1 -PREHOOK: query: SELECT CAST(CAST(-18 AS SMALLINT) AS TINYINT) FROM src LIMIT 1 +PREHOOK: query: SELECT CAST(CAST(-18 AS SMALLINT) AS TINYINT) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT CAST(CAST(-18 AS SMALLINT) AS TINYINT) FROM src LIMIT 1 +POSTHOOK: query: SELECT CAST(CAST(-18 AS SMALLINT) AS TINYINT) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### -18 -PREHOOK: query: SELECT CAST(-129 AS TINYINT) FROM src LIMIT 1 +PREHOOK: query: SELECT CAST(-129 AS TINYINT) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT CAST(-129 AS TINYINT) FROM src LIMIT 1 +POSTHOOK: query: SELECT CAST(-129 AS TINYINT) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### 127 -PREHOOK: query: SELECT CAST(CAST(-1025 AS BIGINT) AS TINYINT) FROM src LIMIT 1 +PREHOOK: query: SELECT CAST(CAST(-1025 AS BIGINT) AS TINYINT) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT CAST(CAST(-1025 AS BIGINT) AS TINYINT) FROM src LIMIT 1 +POSTHOOK: query: SELECT CAST(CAST(-1025 AS BIGINT) AS 
TINYINT) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### -1 -PREHOOK: query: SELECT CAST(CAST(-3.14 AS DOUBLE) AS TINYINT) FROM src LIMIT 1 +PREHOOK: query: SELECT CAST(CAST(-3.14 AS DOUBLE) AS TINYINT) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT CAST(CAST(-3.14 AS DOUBLE) AS TINYINT) FROM src LIMIT 1 +POSTHOOK: query: SELECT CAST(CAST(-3.14 AS DOUBLE) AS TINYINT) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### -3 -PREHOOK: query: SELECT CAST(CAST(-3.14 AS FLOAT) AS TINYINT) FROM src LIMIT 1 +PREHOOK: query: SELECT CAST(CAST(-3.14 AS FLOAT) AS TINYINT) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT CAST(CAST(-3.14 AS FLOAT) AS TINYINT) FROM src LIMIT 1 +POSTHOOK: query: SELECT CAST(CAST(-3.14 AS FLOAT) AS TINYINT) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### -3 -PREHOOK: query: SELECT CAST(CAST(-3.14 AS DECIMAL) AS TINYINT) FROM src LIMIT 1 +PREHOOK: query: SELECT CAST(CAST(-3.14 AS DECIMAL) AS TINYINT) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT CAST(CAST(-3.14 AS DECIMAL) AS TINYINT) FROM src LIMIT 1 +POSTHOOK: query: SELECT CAST(CAST(-3.14 AS DECIMAL) AS TINYINT) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### -3 -PREHOOK: query: SELECT CAST('-38' AS TINYINT) FROM src LIMIT 1 +PREHOOK: query: SELECT CAST('-38' AS TINYINT) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT CAST('-38' AS TINYINT) FROM src LIMIT 1 +POSTHOOK: query: SELECT CAST('-38' AS TINYINT) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_to_double.q.out b/ql/src/test/results/clientpositive/udf_to_double.q.out index d5280d1..7efdcd5 100644 --- a/ql/src/test/results/clientpositive/udf_to_double.q.out +++ b/ql/src/test/results/clientpositive/udf_to_double.q.out @@ -1,82 +1,82 @@ -PREHOOK: query: -- Conversion of main primitive types to Double type: -SELECT CAST(NULL AS DOUBLE) FROM src LIMIT 1 +PREHOOK: query: -- Conversion of main primitive types to Double type: +SELECT CAST(NULL AS DOUBLE) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: -- Conversion of main primitive types to Double type: -SELECT CAST(NULL AS DOUBLE) FROM src LIMIT 1 +POSTHOOK: query: -- Conversion of main primitive types to Double type: +SELECT CAST(NULL AS DOUBLE) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### NULL -PREHOOK: query: SELECT CAST(TRUE AS DOUBLE) FROM src LIMIT 1 +PREHOOK: query: SELECT CAST(TRUE AS DOUBLE) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT CAST(TRUE AS DOUBLE) FROM src LIMIT 1 +POSTHOOK: query: SELECT CAST(TRUE AS DOUBLE) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked 
pattern was here #### 1.0 -PREHOOK: query: SELECT CAST(CAST(-7 AS TINYINT) AS DOUBLE) FROM src LIMIT 1 +PREHOOK: query: SELECT CAST(CAST(-7 AS TINYINT) AS DOUBLE) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT CAST(CAST(-7 AS TINYINT) AS DOUBLE) FROM src LIMIT 1 +POSTHOOK: query: SELECT CAST(CAST(-7 AS TINYINT) AS DOUBLE) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### -7.0 -PREHOOK: query: SELECT CAST(CAST(-18 AS SMALLINT) AS DOUBLE) FROM src LIMIT 1 +PREHOOK: query: SELECT CAST(CAST(-18 AS SMALLINT) AS DOUBLE) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT CAST(CAST(-18 AS SMALLINT) AS DOUBLE) FROM src LIMIT 1 +POSTHOOK: query: SELECT CAST(CAST(-18 AS SMALLINT) AS DOUBLE) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### -18.0 -PREHOOK: query: SELECT CAST(-129 AS DOUBLE) FROM src LIMIT 1 +PREHOOK: query: SELECT CAST(-129 AS DOUBLE) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT CAST(-129 AS DOUBLE) FROM src LIMIT 1 +POSTHOOK: query: SELECT CAST(-129 AS DOUBLE) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### -129.0 -PREHOOK: query: SELECT CAST(CAST(-1025 AS BIGINT) AS DOUBLE) FROM src LIMIT 1 +PREHOOK: query: SELECT CAST(CAST(-1025 AS BIGINT) AS DOUBLE) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT CAST(CAST(-1025 AS BIGINT) AS DOUBLE) FROM src LIMIT 1 +POSTHOOK: query: SELECT CAST(CAST(-1025 AS BIGINT) AS DOUBLE) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### -1025.0 -PREHOOK: query: SELECT CAST(CAST(-3.14 AS FLOAT) AS DOUBLE) FROM src LIMIT 1 +PREHOOK: query: SELECT CAST(CAST(-3.14 AS FLOAT) AS DOUBLE) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT CAST(CAST(-3.14 AS FLOAT) AS DOUBLE) FROM src LIMIT 1 +POSTHOOK: query: SELECT CAST(CAST(-3.14 AS FLOAT) AS DOUBLE) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### -3.140000104904175 -PREHOOK: query: SELECT CAST(CAST(-3.14 AS DECIMAL(3,2)) AS DOUBLE) FROM src LIMIT 1 +PREHOOK: query: SELECT CAST(CAST(-3.14 AS DECIMAL(3,2)) AS DOUBLE) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT CAST(CAST(-3.14 AS DECIMAL(3,2)) AS DOUBLE) FROM src LIMIT 1 +POSTHOOK: query: SELECT CAST(CAST(-3.14 AS DECIMAL(3,2)) AS DOUBLE) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### -3.14 -PREHOOK: query: SELECT CAST('-38.14' AS DOUBLE) FROM src LIMIT 1 +PREHOOK: query: SELECT CAST('-38.14' AS DOUBLE) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT CAST('-38.14' AS DOUBLE) FROM src LIMIT 1 +POSTHOOK: query: SELECT CAST('-38.14' AS DOUBLE) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A 
masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_to_float.q.out b/ql/src/test/results/clientpositive/udf_to_float.q.out index 71b5b79..9868796 100644 --- a/ql/src/test/results/clientpositive/udf_to_float.q.out +++ b/ql/src/test/results/clientpositive/udf_to_float.q.out @@ -1,82 +1,82 @@ -PREHOOK: query: -- Conversion of main primitive types to Float type: -SELECT CAST(NULL AS FLOAT) FROM src LIMIT 1 +PREHOOK: query: -- Conversion of main primitive types to Float type: +SELECT CAST(NULL AS FLOAT) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: -- Conversion of main primitive types to Float type: -SELECT CAST(NULL AS FLOAT) FROM src LIMIT 1 +POSTHOOK: query: -- Conversion of main primitive types to Float type: +SELECT CAST(NULL AS FLOAT) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### NULL -PREHOOK: query: SELECT CAST(TRUE AS FLOAT) FROM src LIMIT 1 +PREHOOK: query: SELECT CAST(TRUE AS FLOAT) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT CAST(TRUE AS FLOAT) FROM src LIMIT 1 +POSTHOOK: query: SELECT CAST(TRUE AS FLOAT) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### 1.0 -PREHOOK: query: SELECT CAST(CAST(-7 AS TINYINT) AS FLOAT) FROM src LIMIT 1 +PREHOOK: query: SELECT CAST(CAST(-7 AS TINYINT) AS FLOAT) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT CAST(CAST(-7 AS TINYINT) AS FLOAT) FROM src LIMIT 1 +POSTHOOK: query: SELECT CAST(CAST(-7 AS TINYINT) AS FLOAT) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### -7.0 -PREHOOK: query: SELECT CAST(CAST(-18 AS SMALLINT) AS FLOAT) FROM src LIMIT 1 +PREHOOK: query: SELECT CAST(CAST(-18 AS SMALLINT) AS FLOAT) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT CAST(CAST(-18 AS SMALLINT) AS FLOAT) FROM src LIMIT 1 +POSTHOOK: query: SELECT CAST(CAST(-18 AS SMALLINT) AS FLOAT) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### -18.0 -PREHOOK: query: SELECT CAST(-129 AS FLOAT) FROM src LIMIT 1 +PREHOOK: query: SELECT CAST(-129 AS FLOAT) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT CAST(-129 AS FLOAT) FROM src LIMIT 1 +POSTHOOK: query: SELECT CAST(-129 AS FLOAT) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### -129.0 -PREHOOK: query: SELECT CAST(CAST(-1025 AS BIGINT) AS FLOAT) FROM src LIMIT 1 +PREHOOK: query: SELECT CAST(CAST(-1025 AS BIGINT) AS FLOAT) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT CAST(CAST(-1025 AS BIGINT) AS FLOAT) FROM src LIMIT 1 +POSTHOOK: query: SELECT CAST(CAST(-1025 AS BIGINT) AS FLOAT) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### -1025.0 -PREHOOK: query: SELECT CAST(CAST(-3.14 AS DOUBLE) AS FLOAT) FROM src LIMIT 1 +PREHOOK: query: SELECT CAST(CAST(-3.14 AS 
DOUBLE) AS FLOAT) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT CAST(CAST(-3.14 AS DOUBLE) AS FLOAT) FROM src LIMIT 1 +POSTHOOK: query: SELECT CAST(CAST(-3.14 AS DOUBLE) AS FLOAT) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### -3.14 -PREHOOK: query: SELECT CAST(CAST(-3.14 AS DECIMAL(3,2)) AS FLOAT) FROM src LIMIT 1 +PREHOOK: query: SELECT CAST(CAST(-3.14 AS DECIMAL(3,2)) AS FLOAT) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT CAST(CAST(-3.14 AS DECIMAL(3,2)) AS FLOAT) FROM src LIMIT 1 +POSTHOOK: query: SELECT CAST(CAST(-3.14 AS DECIMAL(3,2)) AS FLOAT) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### -3.14 -PREHOOK: query: SELECT CAST('-38.14' AS FLOAT) FROM src LIMIT 1 +PREHOOK: query: SELECT CAST('-38.14' AS FLOAT) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT CAST('-38.14' AS FLOAT) FROM src LIMIT 1 +POSTHOOK: query: SELECT CAST('-38.14' AS FLOAT) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_to_long.q.out b/ql/src/test/results/clientpositive/udf_to_long.q.out index 05c025a..015b6a5 100644 --- a/ql/src/test/results/clientpositive/udf_to_long.q.out +++ b/ql/src/test/results/clientpositive/udf_to_long.q.out @@ -1,82 +1,82 @@ -PREHOOK: query: -- Conversion of main primitive types to Long type: -SELECT CAST(NULL AS BIGINT) FROM src LIMIT 1 +PREHOOK: query: -- Conversion of main primitive types to Long type: +SELECT CAST(NULL AS BIGINT) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: -- Conversion of main primitive types to Long type: -SELECT CAST(NULL AS BIGINT) FROM src LIMIT 1 +POSTHOOK: query: -- Conversion of main primitive types to Long type: +SELECT CAST(NULL AS BIGINT) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### NULL -PREHOOK: query: SELECT CAST(TRUE AS BIGINT) FROM src LIMIT 1 +PREHOOK: query: SELECT CAST(TRUE AS BIGINT) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT CAST(TRUE AS BIGINT) FROM src LIMIT 1 +POSTHOOK: query: SELECT CAST(TRUE AS BIGINT) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### 1 -PREHOOK: query: SELECT CAST(CAST(-7 AS TINYINT) AS BIGINT) FROM src LIMIT 1 +PREHOOK: query: SELECT CAST(CAST(-7 AS TINYINT) AS BIGINT) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT CAST(CAST(-7 AS TINYINT) AS BIGINT) FROM src LIMIT 1 +POSTHOOK: query: SELECT CAST(CAST(-7 AS TINYINT) AS BIGINT) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### -7 -PREHOOK: query: SELECT CAST(CAST(-18 AS SMALLINT) AS BIGINT) FROM src LIMIT 1 +PREHOOK: query: SELECT CAST(CAST(-18 AS SMALLINT) AS BIGINT) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern 
was here #### -POSTHOOK: query: SELECT CAST(CAST(-18 AS SMALLINT) AS BIGINT) FROM src LIMIT 1 +POSTHOOK: query: SELECT CAST(CAST(-18 AS SMALLINT) AS BIGINT) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### -18 -PREHOOK: query: SELECT CAST(-129 AS BIGINT) FROM src LIMIT 1 +PREHOOK: query: SELECT CAST(-129 AS BIGINT) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT CAST(-129 AS BIGINT) FROM src LIMIT 1 +POSTHOOK: query: SELECT CAST(-129 AS BIGINT) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### -129 -PREHOOK: query: SELECT CAST(CAST(-3.14 AS DOUBLE) AS BIGINT) FROM src LIMIT 1 +PREHOOK: query: SELECT CAST(CAST(-3.14 AS DOUBLE) AS BIGINT) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT CAST(CAST(-3.14 AS DOUBLE) AS BIGINT) FROM src LIMIT 1 +POSTHOOK: query: SELECT CAST(CAST(-3.14 AS DOUBLE) AS BIGINT) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### -3 -PREHOOK: query: SELECT CAST(CAST(-3.14 AS FLOAT) AS BIGINT) FROM src LIMIT 1 +PREHOOK: query: SELECT CAST(CAST(-3.14 AS FLOAT) AS BIGINT) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT CAST(CAST(-3.14 AS FLOAT) AS BIGINT) FROM src LIMIT 1 +POSTHOOK: query: SELECT CAST(CAST(-3.14 AS FLOAT) AS BIGINT) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### -3 -PREHOOK: query: SELECT CAST(CAST(-3.14 AS DECIMAL) AS BIGINT) FROM src LIMIT 1 +PREHOOK: query: SELECT CAST(CAST(-3.14 AS DECIMAL) AS BIGINT) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT CAST(CAST(-3.14 AS DECIMAL) AS BIGINT) FROM src LIMIT 1 +POSTHOOK: query: SELECT CAST(CAST(-3.14 AS DECIMAL) AS BIGINT) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### -3 -PREHOOK: query: SELECT CAST('-38' AS BIGINT) FROM src LIMIT 1 +PREHOOK: query: SELECT CAST('-38' AS BIGINT) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT CAST('-38' AS BIGINT) FROM src LIMIT 1 +POSTHOOK: query: SELECT CAST('-38' AS BIGINT) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_to_short.q.out b/ql/src/test/results/clientpositive/udf_to_short.q.out index 6621f19..06eb815 100644 --- a/ql/src/test/results/clientpositive/udf_to_short.q.out +++ b/ql/src/test/results/clientpositive/udf_to_short.q.out @@ -1,82 +1,82 @@ -PREHOOK: query: -- Conversion of main primitive types to Short type: -SELECT CAST(NULL AS SMALLINT) FROM src LIMIT 1 +PREHOOK: query: -- Conversion of main primitive types to Short type: +SELECT CAST(NULL AS SMALLINT) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: -- Conversion of main primitive types to Short type: -SELECT CAST(NULL AS SMALLINT) FROM src LIMIT 1 +POSTHOOK: query: -- Conversion of main primitive types to Short type: 
+SELECT CAST(NULL AS SMALLINT) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### NULL -PREHOOK: query: SELECT CAST(TRUE AS SMALLINT) FROM src LIMIT 1 +PREHOOK: query: SELECT CAST(TRUE AS SMALLINT) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT CAST(TRUE AS SMALLINT) FROM src LIMIT 1 +POSTHOOK: query: SELECT CAST(TRUE AS SMALLINT) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### 1 -PREHOOK: query: SELECT CAST(CAST(-18 AS TINYINT) AS SMALLINT) FROM src LIMIT 1 +PREHOOK: query: SELECT CAST(CAST(-18 AS TINYINT) AS SMALLINT) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT CAST(CAST(-18 AS TINYINT) AS SMALLINT) FROM src LIMIT 1 +POSTHOOK: query: SELECT CAST(CAST(-18 AS TINYINT) AS SMALLINT) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### -18 -PREHOOK: query: SELECT CAST(-129 AS SMALLINT) FROM src LIMIT 1 +PREHOOK: query: SELECT CAST(-129 AS SMALLINT) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT CAST(-129 AS SMALLINT) FROM src LIMIT 1 +POSTHOOK: query: SELECT CAST(-129 AS SMALLINT) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### -129 -PREHOOK: query: SELECT CAST(CAST(-1025 AS BIGINT) AS SMALLINT) FROM src LIMIT 1 +PREHOOK: query: SELECT CAST(CAST(-1025 AS BIGINT) AS SMALLINT) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT CAST(CAST(-1025 AS BIGINT) AS SMALLINT) FROM src LIMIT 1 +POSTHOOK: query: SELECT CAST(CAST(-1025 AS BIGINT) AS SMALLINT) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### -1025 -PREHOOK: query: SELECT CAST(CAST(-3.14 AS DOUBLE) AS SMALLINT) FROM src LIMIT 1 +PREHOOK: query: SELECT CAST(CAST(-3.14 AS DOUBLE) AS SMALLINT) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT CAST(CAST(-3.14 AS DOUBLE) AS SMALLINT) FROM src LIMIT 1 +POSTHOOK: query: SELECT CAST(CAST(-3.14 AS DOUBLE) AS SMALLINT) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### -3 -PREHOOK: query: SELECT CAST(CAST(-3.14 AS FLOAT) AS SMALLINT) FROM src LIMIT 1 +PREHOOK: query: SELECT CAST(CAST(-3.14 AS FLOAT) AS SMALLINT) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT CAST(CAST(-3.14 AS FLOAT) AS SMALLINT) FROM src LIMIT 1 +POSTHOOK: query: SELECT CAST(CAST(-3.14 AS FLOAT) AS SMALLINT) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### -3 -PREHOOK: query: SELECT CAST(CAST(-3.14 AS DECIMAL) AS SMALLINT) FROM src LIMIT 1 +PREHOOK: query: SELECT CAST(CAST(-3.14 AS DECIMAL) AS SMALLINT) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT CAST(CAST(-3.14 AS DECIMAL) AS SMALLINT) FROM src LIMIT 1 +POSTHOOK: query: SELECT 
CAST(CAST(-3.14 AS DECIMAL) AS SMALLINT) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### -3 -PREHOOK: query: SELECT CAST('-38' AS SMALLINT) FROM src LIMIT 1 +PREHOOK: query: SELECT CAST('-38' AS SMALLINT) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT CAST('-38' AS SMALLINT) FROM src LIMIT 1 +POSTHOOK: query: SELECT CAST('-38' AS SMALLINT) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_to_string.q.out b/ql/src/test/results/clientpositive/udf_to_string.q.out index c164e69..9defcd9 100644 --- a/ql/src/test/results/clientpositive/udf_to_string.q.out +++ b/ql/src/test/results/clientpositive/udf_to_string.q.out @@ -1,91 +1,91 @@ -PREHOOK: query: -- Conversion of main primitive types to String type: -SELECT CAST(NULL AS STRING) FROM src LIMIT 1 +PREHOOK: query: -- Conversion of main primitive types to String type: +SELECT CAST(NULL AS STRING) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: -- Conversion of main primitive types to String type: -SELECT CAST(NULL AS STRING) FROM src LIMIT 1 +POSTHOOK: query: -- Conversion of main primitive types to String type: +SELECT CAST(NULL AS STRING) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### NULL -PREHOOK: query: SELECT CAST(TRUE AS STRING) FROM src LIMIT 1 +PREHOOK: query: SELECT CAST(TRUE AS STRING) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT CAST(TRUE AS STRING) FROM src LIMIT 1 +POSTHOOK: query: SELECT CAST(TRUE AS STRING) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### TRUE -PREHOOK: query: SELECT CAST(CAST(1 AS TINYINT) AS STRING) FROM src LIMIT 1 +PREHOOK: query: SELECT CAST(CAST(1 AS TINYINT) AS STRING) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT CAST(CAST(1 AS TINYINT) AS STRING) FROM src LIMIT 1 +POSTHOOK: query: SELECT CAST(CAST(1 AS TINYINT) AS STRING) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### 1 -PREHOOK: query: SELECT CAST(CAST(-18 AS SMALLINT) AS STRING) FROM src LIMIT 1 +PREHOOK: query: SELECT CAST(CAST(-18 AS SMALLINT) AS STRING) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT CAST(CAST(-18 AS SMALLINT) AS STRING) FROM src LIMIT 1 +POSTHOOK: query: SELECT CAST(CAST(-18 AS SMALLINT) AS STRING) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### -18 -PREHOOK: query: SELECT CAST(-129 AS STRING) FROM src LIMIT 1 +PREHOOK: query: SELECT CAST(-129 AS STRING) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT CAST(-129 AS STRING) FROM src LIMIT 1 +POSTHOOK: query: SELECT CAST(-129 AS STRING) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### -129 -PREHOOK: query: SELECT 
CAST(CAST(-1025 AS BIGINT) AS STRING) FROM src LIMIT 1 +PREHOOK: query: SELECT CAST(CAST(-1025 AS BIGINT) AS STRING) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT CAST(CAST(-1025 AS BIGINT) AS STRING) FROM src LIMIT 1 +POSTHOOK: query: SELECT CAST(CAST(-1025 AS BIGINT) AS STRING) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### -1025 -PREHOOK: query: SELECT CAST(CAST(-3.14 AS DOUBLE) AS STRING) FROM src LIMIT 1 +PREHOOK: query: SELECT CAST(CAST(-3.14 AS DOUBLE) AS STRING) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT CAST(CAST(-3.14 AS DOUBLE) AS STRING) FROM src LIMIT 1 +POSTHOOK: query: SELECT CAST(CAST(-3.14 AS DOUBLE) AS STRING) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### -3.14 -PREHOOK: query: SELECT CAST(CAST(-3.14 AS FLOAT) AS STRING) FROM src LIMIT 1 +PREHOOK: query: SELECT CAST(CAST(-3.14 AS FLOAT) AS STRING) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT CAST(CAST(-3.14 AS FLOAT) AS STRING) FROM src LIMIT 1 +POSTHOOK: query: SELECT CAST(CAST(-3.14 AS FLOAT) AS STRING) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### -3.14 -PREHOOK: query: SELECT CAST(CAST(-3.14 AS DECIMAL(3,2)) AS STRING) FROM src LIMIT 1 +PREHOOK: query: SELECT CAST(CAST(-3.14 AS DECIMAL(3,2)) AS STRING) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT CAST(CAST(-3.14 AS DECIMAL(3,2)) AS STRING) FROM src LIMIT 1 +POSTHOOK: query: SELECT CAST(CAST(-3.14 AS DECIMAL(3,2)) AS STRING) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### -3.14 -PREHOOK: query: SELECT CAST('Foo' AS STRING) FROM src LIMIT 1 +PREHOOK: query: SELECT CAST('Foo' AS STRING) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT CAST('Foo' AS STRING) FROM src LIMIT 1 +POSTHOOK: query: SELECT CAST('Foo' AS STRING) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_translate.q.out b/ql/src/test/results/clientpositive/udf_translate.q.out index ab43c17..e83eab6 100644 --- a/ql/src/test/results/clientpositive/udf_translate.q.out +++ b/ql/src/test/results/clientpositive/udf_translate.q.out @@ -55,13 +55,13 @@ POSTHOOK: Lineage: table_translate.input_string SIMPLE [] POSTHOOK: Lineage: table_translate.to_string SIMPLE [] PREHOOK: query: -- Run some queries on constant input parameters SELECT translate('abcd', 'ab', '12'), - translate('abcd', 'abc', '12') FROM src LIMIT 1 + translate('abcd', 'abc', '12') FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### POSTHOOK: query: -- Run some queries on constant input parameters SELECT translate('abcd', 'ab', '12'), - translate('abcd', 'abc', '12') FROM src LIMIT 1 + translate('abcd', 'abc', '12') FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### @@ 
-72,13 +72,13 @@ POSTHOOK: Lineage: table_translate.to_string SIMPLE [] 12cd 12d PREHOOK: query: -- Run some queries where first parameter being a table column while the other two being constants SELECT translate(table_input.input, 'ab', '12'), - translate(table_input.input, 'abc', '12') FROM table_input LIMIT 1 + translate(table_input.input, 'abc', '12') FROM table_input tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@table_input #### A masked pattern was here #### POSTHOOK: query: -- Run some queries where first parameter being a table column while the other two being constants SELECT translate(table_input.input, 'ab', '12'), - translate(table_input.input, 'abc', '12') FROM table_input LIMIT 1 + translate(table_input.input, 'abc', '12') FROM table_input tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@table_input #### A masked pattern was here #### @@ -88,12 +88,12 @@ POSTHOOK: Lineage: table_translate.input_string SIMPLE [] POSTHOOK: Lineage: table_translate.to_string SIMPLE [] 12cd 12d PREHOOK: query: -- Run some queries where all parameters are coming from table columns -SELECT translate(input_string, from_string, to_string) FROM table_translate LIMIT 1 +SELECT translate(input_string, from_string, to_string) FROM table_translate tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@table_translate #### A masked pattern was here #### POSTHOOK: query: -- Run some queries where all parameters are coming from table columns -SELECT translate(input_string, from_string, to_string) FROM table_translate LIMIT 1 +SELECT translate(input_string, from_string, to_string) FROM table_translate tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@table_translate #### A masked pattern was here #### @@ -106,7 +106,7 @@ PREHOOK: query: -- Run some queries where some parameters are NULL SELECT translate(NULL, 'ab', '12'), translate('abcd', NULL, '12'), translate('abcd', 'ab', NULL), - translate(NULL, NULL, NULL) FROM src LIMIT 1 + translate(NULL, NULL, NULL) FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### @@ -114,7 +114,7 @@ POSTHOOK: query: -- Run some queries where some parameters are NULL SELECT translate(NULL, 'ab', '12'), translate('abcd', NULL, '12'), translate('abcd', 'ab', NULL), - translate(NULL, NULL, NULL) FROM src LIMIT 1 + translate(NULL, NULL, NULL) FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### @@ -125,13 +125,13 @@ POSTHOOK: Lineage: table_translate.to_string SIMPLE [] NULL NULL NULL NULL PREHOOK: query: -- Run some queries where the same character appears several times in the from string (2nd argument) of the UDF SELECT translate('abcd', 'aba', '123'), - translate('abcd', 'aba', '12') FROM src LIMIT 1 + translate('abcd', 'aba', '12') FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### POSTHOOK: query: -- Run some queries where the same character appears several times in the from string (2nd argument) of the UDF SELECT translate('abcd', 'aba', '123'), - translate('abcd', 'aba', '12') FROM src LIMIT 1 + translate('abcd', 'aba', '12') FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### @@ -141,12 +141,12 @@ POSTHOOK: Lineage: table_translate.input_string SIMPLE [] POSTHOOK: Lineage: table_translate.to_string SIMPLE [] 12cd 12cd PREHOOK: query: -- Run some queries for 
the ignorant case when the 3rd parameter has more characters than the second one -SELECT translate('abcd', 'abc', '1234') FROM src LIMIT 1 +SELECT translate('abcd', 'abc', '1234') FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### POSTHOOK: query: -- Run some queries for the ignorant case when the 3rd parameter has more characters than the second one -SELECT translate('abcd', 'abc', '1234') FROM src LIMIT 1 +SELECT translate('abcd', 'abc', '1234') FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### @@ -156,12 +156,12 @@ POSTHOOK: Lineage: table_translate.input_string SIMPLE [] POSTHOOK: Lineage: table_translate.to_string SIMPLE [] 123d PREHOOK: query: -- Test proper function over UTF-8 characters -SELECT translate('Àbcd', 'À', 'Ã') FROM src LIMIT 1 +SELECT translate('Àbcd', 'À', 'Ã') FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### POSTHOOK: query: -- Test proper function over UTF-8 characters -SELECT translate('Àbcd', 'À', 'Ã') FROM src LIMIT 1 +SELECT translate('Àbcd', 'À', 'Ã') FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_unhex.q.out b/ql/src/test/results/clientpositive/udf_unhex.q.out index a14118d..1573f15 100644 --- a/ql/src/test/results/clientpositive/udf_unhex.q.out +++ b/ql/src/test/results/clientpositive/udf_unhex.q.out @@ -29,7 +29,7 @@ SELECT unhex('61'), unhex('2D34'), unhex('') -FROM src limit 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### @@ -41,7 +41,7 @@ SELECT unhex('61'), unhex('2D34'), unhex('') -FROM src limit 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### @@ -51,7 +51,7 @@ SELECT unhex('MySQL'), unhex('G123'), unhex('\0') -FROM src limit 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### @@ -60,7 +60,7 @@ SELECT unhex('MySQL'), unhex('G123'), unhex('\0') -FROM src limit 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_union.q.out b/ql/src/test/results/clientpositive/udf_union.q.out index b07b8de..6d11174 100644 --- a/ql/src/test/results/clientpositive/udf_union.q.out +++ b/ql/src/test/results/clientpositive/udf_union.q.out @@ -14,59 +14,48 @@ Example: PREHOOK: query: EXPLAIN SELECT create_union(0, key), create_union(if(key<100, 0, 1), 2.0, value), create_union(1, "a", struct(2, "b")) -FROM src LIMIT 2 +FROM src tablesample (2 rows) PREHOOK: type: QUERY POSTHOOK: query: EXPLAIN SELECT create_union(0, key), create_union(if(key<100, 0, 1), 2.0, value), create_union(1, "a", struct(2, "b")) -FROM src LIMIT 2 +FROM src tablesample (2 rows) POSTHOOK: type: QUERY ABSTRACT SYNTAX TREE: - (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION create_union 0 (TOK_TABLE_OR_COL key))) (TOK_SELEXPR (TOK_FUNCTION create_union (TOK_FUNCTION if (< (TOK_TABLE_OR_COL key) 100) 0 1) 2.0 (TOK_TABLE_OR_COL value))) (TOK_SELEXPR (TOK_FUNCTION create_union 1 "a" (TOK_FUNCTION struct 2 "b")))) (TOK_LIMIT 2))) + (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src) 
(TOK_TABLESPLITSAMPLE TOK_ROWCOUNT 2))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION create_union 0 (TOK_TABLE_OR_COL key))) (TOK_SELEXPR (TOK_FUNCTION create_union (TOK_FUNCTION if (< (TOK_TABLE_OR_COL key) 100) 0 1) 2.0 (TOK_TABLE_OR_COL value))) (TOK_SELEXPR (TOK_FUNCTION create_union 1 "a" (TOK_FUNCTION struct 2 "b")))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - src - TableScan - alias: src - Select Operator - expressions: - expr: create_union(0,key) - type: uniontype - expr: create_union(if((key < 100), 0, 1),2.0,value) - type: uniontype - expr: create_union(1,'a',struct(2,'b')) - type: uniontype> - outputColumnNames: _col0, _col1, _col2 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator - limit: 2 + limit: -1 + Processor Tree: + TableScan + alias: src + Row Limit Per Split: 2 + Select Operator + expressions: + expr: create_union(0,key) + type: uniontype + expr: create_union(if((key < 100), 0, 1),2.0,value) + type: uniontype + expr: create_union(1,'a',struct(2,'b')) + type: uniontype> + outputColumnNames: _col0, _col1, _col2 + ListSink PREHOOK: query: SELECT create_union(0, key), create_union(if(key<100, 0, 1), 2.0, value), create_union(1, "a", struct(2, "b")) -FROM src LIMIT 2 +FROM src tablesample (2 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### POSTHOOK: query: SELECT create_union(0, key), create_union(if(key<100, 0, 1), 2.0, value), create_union(1, "a", struct(2, "b")) -FROM src LIMIT 2 +FROM src tablesample (2 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_weekofyear.q.out b/ql/src/test/results/clientpositive/udf_weekofyear.q.out index f28d2a0..40cab0d 100644 --- a/ql/src/test/results/clientpositive/udf_weekofyear.q.out +++ b/ql/src/test/results/clientpositive/udf_weekofyear.q.out @@ -15,13 +15,13 @@ Examples: 1 PREHOOK: query: SELECT weekofyear('1980-01-01'), weekofyear('1980-01-06'), weekofyear('1980-01-07'), weekofyear('1980-12-31'), weekofyear('1984-1-1'), weekofyear('2008-02-20 00:00:00'), weekofyear('1980-12-28 23:59:59'), weekofyear('1980-12-29 23:59:59') -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### POSTHOOK: query: SELECT weekofyear('1980-01-01'), weekofyear('1980-01-06'), weekofyear('1980-01-07'), weekofyear('1980-12-31'), weekofyear('1984-1-1'), weekofyear('2008-02-20 00:00:00'), weekofyear('1980-12-28 23:59:59'), weekofyear('1980-12-29 23:59:59') -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_when.q.out b/ql/src/test/results/clientpositive/udf_when.q.out index 9320c4f..367f48a 100644 --- a/ql/src/test/results/clientpositive/udf_when.q.out +++ b/ql/src/test/results/clientpositive/udf_when.q.out @@ -34,7 +34,7 @@ SELECT CASE WHEN 25=26 THEN 27 WHEN 28=28 THEN NULL END -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY POSTHOOK: query: EXPLAIN SELECT CASE @@ -62,49 +62,38 @@ SELECT CASE WHEN 
25=26 THEN 27 WHEN 28=28 THEN NULL END -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY ABSTRACT SYNTAX TREE: - (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION WHEN (= 1 1) 2 (= 1 3) 4 5)) (TOK_SELEXPR (TOK_FUNCTION WHEN (= 6 7) 8 9)) (TOK_SELEXPR (TOK_FUNCTION WHEN (= 10 11) 12 (= 13 13) 14)) (TOK_SELEXPR (TOK_FUNCTION WHEN (= 15 16) 17 (= 18 19) 20)) (TOK_SELEXPR (TOK_FUNCTION WHEN (= 21 22) TOK_NULL (= 23 23) 24)) (TOK_SELEXPR (TOK_FUNCTION WHEN (= 25 26) 27 (= 28 28) TOK_NULL))) (TOK_LIMIT 1))) + (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src) (TOK_TABLESPLITSAMPLE TOK_ROWCOUNT 1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION WHEN (= 1 1) 2 (= 1 3) 4 5)) (TOK_SELEXPR (TOK_FUNCTION WHEN (= 6 7) 8 9)) (TOK_SELEXPR (TOK_FUNCTION WHEN (= 10 11) 12 (= 13 13) 14)) (TOK_SELEXPR (TOK_FUNCTION WHEN (= 15 16) 17 (= 18 19) 20)) (TOK_SELEXPR (TOK_FUNCTION WHEN (= 21 22) TOK_NULL (= 23 23) 24)) (TOK_SELEXPR (TOK_FUNCTION WHEN (= 25 26) 27 (= 28 28) TOK_NULL))))) STAGE DEPENDENCIES: - Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Alias -> Map Operator Tree: - src - TableScan - alias: src - Select Operator - expressions: - expr: CASE WHEN ((1 = 1)) THEN (2) WHEN ((1 = 3)) THEN (4) ELSE (5) END - type: int - expr: CASE WHEN ((6 = 7)) THEN (8) ELSE (9) END - type: int - expr: CASE WHEN ((10 = 11)) THEN (12) WHEN ((13 = 13)) THEN (14) END - type: int - expr: CASE WHEN ((15 = 16)) THEN (17) WHEN ((18 = 19)) THEN (20) END - type: int - expr: CASE WHEN ((21 = 22)) THEN (null) WHEN ((23 = 23)) THEN (24) END - type: int - expr: CASE WHEN ((25 = 26)) THEN (27) WHEN ((28 = 28)) THEN (null) END - type: int - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator - limit: 1 + limit: -1 + Processor Tree: + TableScan + alias: src + Row Limit Per Split: 1 + Select Operator + expressions: + expr: CASE WHEN ((1 = 1)) THEN (2) WHEN ((1 = 3)) THEN (4) ELSE (5) END + type: int + expr: CASE WHEN ((6 = 7)) THEN (8) ELSE (9) END + type: int + expr: CASE WHEN ((10 = 11)) THEN (12) WHEN ((13 = 13)) THEN (14) END + type: int + expr: CASE WHEN ((15 = 16)) THEN (17) WHEN ((18 = 19)) THEN (20) END + type: int + expr: CASE WHEN ((21 = 22)) THEN (null) WHEN ((23 = 23)) THEN (24) END + type: int + expr: CASE WHEN ((25 = 26)) THEN (27) WHEN ((28 = 28)) THEN (null) END + type: int + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + ListSink PREHOOK: query: SELECT CASE @@ -132,7 +121,7 @@ PREHOOK: query: SELECT CASE WHEN 25=26 THEN 27 WHEN 28=28 THEN NULL END -FROM src LIMIT 1 +FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### @@ -161,7 +150,7 @@ POSTHOOK: query: SELECT CASE WHEN 25=26 THEN 27 WHEN 28=28 THEN NULL END -FROM src LIMIT 1 +FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_xpath.q.out b/ql/src/test/results/clientpositive/udf_xpath.q.out index b6769a4..9992895 100644 --- 
a/ql/src/test/results/clientpositive/udf_xpath.q.out +++ b/ql/src/test/results/clientpositive/udf_xpath.q.out @@ -15,47 +15,47 @@ Example: ["b1","b2","b3"] > SELECT xpath('b1b2b3c1c2', 'a/c/text()') FROM src LIMIT 1 ["c1","c2"] -PREHOOK: query: SELECT xpath ('b1b2b3c1c2', 'a/text()') FROM src LIMIT 1 +PREHOOK: query: SELECT xpath ('b1b2b3c1c2', 'a/text()') FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT xpath ('b1b2b3c1c2', 'a/text()') FROM src LIMIT 1 +POSTHOOK: query: SELECT xpath ('b1b2b3c1c2', 'a/text()') FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### [] -PREHOOK: query: SELECT xpath ('b1b2b3c1c2', 'a/*/text()') FROM src LIMIT 1 +PREHOOK: query: SELECT xpath ('b1b2b3c1c2', 'a/*/text()') FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT xpath ('b1b2b3c1c2', 'a/*/text()') FROM src LIMIT 1 +POSTHOOK: query: SELECT xpath ('b1b2b3c1c2', 'a/*/text()') FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### ["b1","b2","b3","c1","c2"] -PREHOOK: query: SELECT xpath ('b1b2b3c1c2', 'a/b/text()') FROM src LIMIT 1 +PREHOOK: query: SELECT xpath ('b1b2b3c1c2', 'a/b/text()') FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT xpath ('b1b2b3c1c2', 'a/b/text()') FROM src LIMIT 1 +POSTHOOK: query: SELECT xpath ('b1b2b3c1c2', 'a/b/text()') FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### ["b1","b2","b3"] -PREHOOK: query: SELECT xpath ('b1b2b3c1c2', 'a/c/text()') FROM src LIMIT 1 +PREHOOK: query: SELECT xpath ('b1b2b3c1c2', 'a/c/text()') FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT xpath ('b1b2b3c1c2', 'a/c/text()') FROM src LIMIT 1 +POSTHOOK: query: SELECT xpath ('b1b2b3c1c2', 'a/c/text()') FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### ["c1","c2"] -PREHOOK: query: SELECT xpath ('b1b2b3c1c2', 'a/*[@class="bb"]/text()') FROM src LIMIT 1 +PREHOOK: query: SELECT xpath ('b1b2b3c1c2', 'a/*[@class="bb"]/text()') FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT xpath ('b1b2b3c1c2', 'a/*[@class="bb"]/text()') FROM src LIMIT 1 +POSTHOOK: query: SELECT xpath ('b1b2b3c1c2', 'a/*[@class="bb"]/text()') FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_xpath_boolean.q.out b/ql/src/test/results/clientpositive/udf_xpath_boolean.q.out index 8f7484d..cf277f8 100644 --- a/ql/src/test/results/clientpositive/udf_xpath_boolean.q.out +++ b/ql/src/test/results/clientpositive/udf_xpath_boolean.q.out @@ -13,56 +13,56 @@ Example: true > SELECT xpath_boolean('1','a/b = 2') FROM src LIMIT 1; false -PREHOOK: query: SELECT xpath_boolean ('b', 'a/b') FROM src LIMIT 1 +PREHOOK: query: SELECT xpath_boolean ('b', 'a/b') FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT xpath_boolean ('b', 'a/b') FROM src LIMIT 1 +POSTHOOK: 
query: SELECT xpath_boolean ('b', 'a/b') FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### true -PREHOOK: query: SELECT xpath_boolean ('b', 'a/c') FROM src LIMIT 1 +PREHOOK: query: SELECT xpath_boolean ('b', 'a/c') FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT xpath_boolean ('b', 'a/c') FROM src LIMIT 1 +POSTHOOK: query: SELECT xpath_boolean ('b', 'a/c') FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### false -PREHOOK: query: SELECT xpath_boolean ('b', 'a/b = "b"') FROM src LIMIT 1 +PREHOOK: query: SELECT xpath_boolean ('b', 'a/b = "b"') FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT xpath_boolean ('b', 'a/b = "b"') FROM src LIMIT 1 +POSTHOOK: query: SELECT xpath_boolean ('b', 'a/b = "b"') FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### true -PREHOOK: query: SELECT xpath_boolean ('b', 'a/b = "c"') FROM src LIMIT 1 +PREHOOK: query: SELECT xpath_boolean ('b', 'a/b = "c"') FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT xpath_boolean ('b', 'a/b = "c"') FROM src LIMIT 1 +POSTHOOK: query: SELECT xpath_boolean ('b', 'a/b = "c"') FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### false -PREHOOK: query: SELECT xpath_boolean ('10', 'a/b < 10') FROM src LIMIT 1 +PREHOOK: query: SELECT xpath_boolean ('10', 'a/b < 10') FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT xpath_boolean ('10', 'a/b < 10') FROM src LIMIT 1 +POSTHOOK: query: SELECT xpath_boolean ('10', 'a/b < 10') FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### false -PREHOOK: query: SELECT xpath_boolean ('10', 'a/b = 10') FROM src LIMIT 1 +PREHOOK: query: SELECT xpath_boolean ('10', 'a/b = 10') FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT xpath_boolean ('10', 'a/b = 10') FROM src LIMIT 1 +POSTHOOK: query: SELECT xpath_boolean ('10', 'a/b = 10') FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_xpath_double.q.out b/ql/src/test/results/clientpositive/udf_xpath_double.q.out index ddedbc2..dd63858 100644 --- a/ql/src/test/results/clientpositive/udf_xpath_double.q.out +++ b/ql/src/test/results/clientpositive/udf_xpath_double.q.out @@ -26,74 +26,74 @@ Synonyms: xpath_number Example: > SELECT xpath_double('12','sum(a/b)') FROM src LIMIT 1; 3.0 -PREHOOK: query: SELECT xpath_double ('this is not a number', 'a') FROM src LIMIT 1 +PREHOOK: query: SELECT xpath_double ('this is not a number', 'a') FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT xpath_double ('this is not a number', 'a') FROM src LIMIT 1 +POSTHOOK: query: SELECT xpath_double ('this is not a number', 'a') FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A 
masked pattern was here #### NaN -PREHOOK: query: SELECT xpath_double ('this 2 is not a number', 'a') FROM src LIMIT 1 +PREHOOK: query: SELECT xpath_double ('this 2 is not a number', 'a') FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT xpath_double ('this 2 is not a number', 'a') FROM src LIMIT 1 +POSTHOOK: query: SELECT xpath_double ('this 2 is not a number', 'a') FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### NaN -PREHOOK: query: SELECT xpath_double ('200000000040000000000', 'a/b * a/c') FROM src LIMIT 1 +PREHOOK: query: SELECT xpath_double ('200000000040000000000', 'a/b * a/c') FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT xpath_double ('200000000040000000000', 'a/b * a/c') FROM src LIMIT 1 +POSTHOOK: query: SELECT xpath_double ('200000000040000000000', 'a/b * a/c') FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### 8.0E19 -PREHOOK: query: SELECT xpath_double ('try a boolean', 'a = 10') FROM src LIMIT 1 +PREHOOK: query: SELECT xpath_double ('try a boolean', 'a = 10') FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT xpath_double ('try a boolean', 'a = 10') FROM src LIMIT 1 +POSTHOOK: query: SELECT xpath_double ('try a boolean', 'a = 10') FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### 0.0 -PREHOOK: query: SELECT xpath_double ('1248', 'a/b') FROM src LIMIT 1 +PREHOOK: query: SELECT xpath_double ('1248', 'a/b') FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT xpath_double ('1248', 'a/b') FROM src LIMIT 1 +POSTHOOK: query: SELECT xpath_double ('1248', 'a/b') FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### 1.0 -PREHOOK: query: SELECT xpath_double ('1248', 'sum(a/*)') FROM src LIMIT 1 +PREHOOK: query: SELECT xpath_double ('1248', 'sum(a/*)') FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT xpath_double ('1248', 'sum(a/*)') FROM src LIMIT 1 +POSTHOOK: query: SELECT xpath_double ('1248', 'sum(a/*)') FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### 15.0 -PREHOOK: query: SELECT xpath_double ('1248', 'sum(a/b)') FROM src LIMIT 1 +PREHOOK: query: SELECT xpath_double ('1248', 'sum(a/b)') FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT xpath_double ('1248', 'sum(a/b)') FROM src LIMIT 1 +POSTHOOK: query: SELECT xpath_double ('1248', 'sum(a/b)') FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### 7.0 -PREHOOK: query: SELECT xpath_double ('1248', 'sum(a/b[@class="odd"])') FROM src LIMIT 1 +PREHOOK: query: SELECT xpath_double ('1248', 'sum(a/b[@class="odd"])') FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT xpath_double ('1248', 'sum(a/b[@class="odd"])') FROM src LIMIT 1 
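The xpath_double cases above exercise the UDF's numeric coercion: a path that matches no numeric text evaluates to NaN, a boolean comparison evaluates to 0.0, and sum() over the matched nodes returns an ordinary double. A minimal sketch of the same call, using a hand-written XML literal purely for illustration rather than the test's own input:
  > SELECT xpath_double('<a><b>1</b><b>2</b></a>', 'sum(a/b)') FROM src tablesample (1 rows);
  3.0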
+POSTHOOK: query: SELECT xpath_double ('1248', 'sum(a/b[@class="odd"])') FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_xpath_float.q.out b/ql/src/test/results/clientpositive/udf_xpath_float.q.out index 45b4ccc..b90edb5 100644 --- a/ql/src/test/results/clientpositive/udf_xpath_float.q.out +++ b/ql/src/test/results/clientpositive/udf_xpath_float.q.out @@ -11,74 +11,74 @@ xpath_float(xml, xpath) - Returns a float value that matches the xpath expressio Example: > SELECT xpath_float('12','sum(a/b)') FROM src LIMIT 1; 3.0 -PREHOOK: query: SELECT xpath_float ('this is not a number', 'a') FROM src LIMIT 1 +PREHOOK: query: SELECT xpath_float ('this is not a number', 'a') FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT xpath_float ('this is not a number', 'a') FROM src LIMIT 1 +POSTHOOK: query: SELECT xpath_float ('this is not a number', 'a') FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### NaN -PREHOOK: query: SELECT xpath_float ('this 2 is not a number', 'a') FROM src LIMIT 1 +PREHOOK: query: SELECT xpath_float ('this 2 is not a number', 'a') FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT xpath_float ('this 2 is not a number', 'a') FROM src LIMIT 1 +POSTHOOK: query: SELECT xpath_float ('this 2 is not a number', 'a') FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### NaN -PREHOOK: query: SELECT xpath_float ('200000000040000000000', 'a/b * a/c') FROM src LIMIT 1 +PREHOOK: query: SELECT xpath_float ('200000000040000000000', 'a/b * a/c') FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT xpath_float ('200000000040000000000', 'a/b * a/c') FROM src LIMIT 1 +POSTHOOK: query: SELECT xpath_float ('200000000040000000000', 'a/b * a/c') FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### 8.0E19 -PREHOOK: query: SELECT xpath_float ('try a boolean', 'a = 10') FROM src LIMIT 1 +PREHOOK: query: SELECT xpath_float ('try a boolean', 'a = 10') FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT xpath_float ('try a boolean', 'a = 10') FROM src LIMIT 1 +POSTHOOK: query: SELECT xpath_float ('try a boolean', 'a = 10') FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### 0.0 -PREHOOK: query: SELECT xpath_float ('1248', 'a/b') FROM src LIMIT 1 +PREHOOK: query: SELECT xpath_float ('1248', 'a/b') FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT xpath_float ('1248', 'a/b') FROM src LIMIT 1 +POSTHOOK: query: SELECT xpath_float ('1248', 'a/b') FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### 1.0 -PREHOOK: query: SELECT xpath_float ('1248', 'sum(a/*)') FROM src LIMIT 1 +PREHOOK: query: SELECT xpath_float ('1248', 'sum(a/*)') FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### 
-POSTHOOK: query: SELECT xpath_float ('1248', 'sum(a/*)') FROM src LIMIT 1 +POSTHOOK: query: SELECT xpath_float ('1248', 'sum(a/*)') FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### 15.0 -PREHOOK: query: SELECT xpath_float ('1248', 'sum(a/b)') FROM src LIMIT 1 +PREHOOK: query: SELECT xpath_float ('1248', 'sum(a/b)') FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT xpath_float ('1248', 'sum(a/b)') FROM src LIMIT 1 +POSTHOOK: query: SELECT xpath_float ('1248', 'sum(a/b)') FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### 7.0 -PREHOOK: query: SELECT xpath_float ('1248', 'sum(a/b[@class="odd"])') FROM src LIMIT 1 +PREHOOK: query: SELECT xpath_float ('1248', 'sum(a/b[@class="odd"])') FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT xpath_float ('1248', 'sum(a/b[@class="odd"])') FROM src LIMIT 1 +POSTHOOK: query: SELECT xpath_float ('1248', 'sum(a/b[@class="odd"])') FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_xpath_int.q.out b/ql/src/test/results/clientpositive/udf_xpath_int.q.out index 74c26de..f958511 100644 --- a/ql/src/test/results/clientpositive/udf_xpath_int.q.out +++ b/ql/src/test/results/clientpositive/udf_xpath_int.q.out @@ -11,74 +11,74 @@ xpath_int(xml, xpath) - Returns an integer value that matches the xpath expressi Example: > SELECT xpath_int('12','sum(a/b)') FROM src LIMIT 1; 3 -PREHOOK: query: SELECT xpath_int ('this is not a number', 'a') FROM src LIMIT 1 +PREHOOK: query: SELECT xpath_int ('this is not a number', 'a') FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT xpath_int ('this is not a number', 'a') FROM src LIMIT 1 +POSTHOOK: query: SELECT xpath_int ('this is not a number', 'a') FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### 0 -PREHOOK: query: SELECT xpath_int ('this 2 is not a number', 'a') FROM src LIMIT 1 +PREHOOK: query: SELECT xpath_int ('this 2 is not a number', 'a') FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT xpath_int ('this 2 is not a number', 'a') FROM src LIMIT 1 +POSTHOOK: query: SELECT xpath_int ('this 2 is not a number', 'a') FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### 0 -PREHOOK: query: SELECT xpath_int ('200000000040000000000', 'a/b * a/c') FROM src LIMIT 1 +PREHOOK: query: SELECT xpath_int ('200000000040000000000', 'a/b * a/c') FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT xpath_int ('200000000040000000000', 'a/b * a/c') FROM src LIMIT 1 +POSTHOOK: query: SELECT xpath_int ('200000000040000000000', 'a/b * a/c') FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### 2147483647 -PREHOOK: query: SELECT xpath_int ('try a boolean', 'a = 10') FROM src LIMIT 1 +PREHOOK: query: SELECT xpath_int ('try a boolean', 'a = 10') FROM src tablesample (1 rows) 
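xpath_int follows the same pattern but returns whole numbers: a non-numeric match comes back as 0 rather than NaN, and the oversized product above saturates at 2147483647. A sketch in the same style, again with an illustrative XML literal:
  > SELECT xpath_int('<a><b>4</b><c>8</c></a>', 'sum(a/*)') FROM src tablesample (1 rows);
  12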
PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT xpath_int ('try a boolean', 'a = 10') FROM src LIMIT 1 +POSTHOOK: query: SELECT xpath_int ('try a boolean', 'a = 10') FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### 0 -PREHOOK: query: SELECT xpath_int ('1248', 'a/b') FROM src LIMIT 1 +PREHOOK: query: SELECT xpath_int ('1248', 'a/b') FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT xpath_int ('1248', 'a/b') FROM src LIMIT 1 +POSTHOOK: query: SELECT xpath_int ('1248', 'a/b') FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### 1 -PREHOOK: query: SELECT xpath_int ('1248', 'sum(a/*)') FROM src LIMIT 1 +PREHOOK: query: SELECT xpath_int ('1248', 'sum(a/*)') FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT xpath_int ('1248', 'sum(a/*)') FROM src LIMIT 1 +POSTHOOK: query: SELECT xpath_int ('1248', 'sum(a/*)') FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### 15 -PREHOOK: query: SELECT xpath_int ('1248', 'sum(a/b)') FROM src LIMIT 1 +PREHOOK: query: SELECT xpath_int ('1248', 'sum(a/b)') FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT xpath_int ('1248', 'sum(a/b)') FROM src LIMIT 1 +POSTHOOK: query: SELECT xpath_int ('1248', 'sum(a/b)') FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### 7 -PREHOOK: query: SELECT xpath_int ('1248', 'sum(a/b[@class="odd"])') FROM src LIMIT 1 +PREHOOK: query: SELECT xpath_int ('1248', 'sum(a/b[@class="odd"])') FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT xpath_int ('1248', 'sum(a/b[@class="odd"])') FROM src LIMIT 1 +POSTHOOK: query: SELECT xpath_int ('1248', 'sum(a/b[@class="odd"])') FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_xpath_long.q.out b/ql/src/test/results/clientpositive/udf_xpath_long.q.out index e5fb7f6..bd7f88c 100644 --- a/ql/src/test/results/clientpositive/udf_xpath_long.q.out +++ b/ql/src/test/results/clientpositive/udf_xpath_long.q.out @@ -11,74 +11,74 @@ xpath_long(xml, xpath) - Returns a long value that matches the xpath expression Example: > SELECT xpath_long('12','sum(a/b)') FROM src LIMIT 1; 3 -PREHOOK: query: SELECT xpath_long ('this is not a number', 'a') FROM src LIMIT 1 +PREHOOK: query: SELECT xpath_long ('this is not a number', 'a') FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT xpath_long ('this is not a number', 'a') FROM src LIMIT 1 +POSTHOOK: query: SELECT xpath_long ('this is not a number', 'a') FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### 0 -PREHOOK: query: SELECT xpath_long ('this 2 is not a number', 'a') FROM src LIMIT 1 +PREHOOK: query: SELECT xpath_long ('this 2 is not a number', 'a') FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### 
A masked pattern was here #### -POSTHOOK: query: SELECT xpath_long ('this 2 is not a number', 'a') FROM src LIMIT 1 +POSTHOOK: query: SELECT xpath_long ('this 2 is not a number', 'a') FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### 0 -PREHOOK: query: SELECT xpath_long ('200000000040000000000', 'a/b * a/c') FROM src LIMIT 1 +PREHOOK: query: SELECT xpath_long ('200000000040000000000', 'a/b * a/c') FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT xpath_long ('200000000040000000000', 'a/b * a/c') FROM src LIMIT 1 +POSTHOOK: query: SELECT xpath_long ('200000000040000000000', 'a/b * a/c') FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### 9223372036854775807 -PREHOOK: query: SELECT xpath_long ('try a boolean', 'a = 10') FROM src LIMIT 1 +PREHOOK: query: SELECT xpath_long ('try a boolean', 'a = 10') FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT xpath_long ('try a boolean', 'a = 10') FROM src LIMIT 1 +POSTHOOK: query: SELECT xpath_long ('try a boolean', 'a = 10') FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### 0 -PREHOOK: query: SELECT xpath_long ('1248', 'a/b') FROM src LIMIT 1 +PREHOOK: query: SELECT xpath_long ('1248', 'a/b') FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT xpath_long ('1248', 'a/b') FROM src LIMIT 1 +POSTHOOK: query: SELECT xpath_long ('1248', 'a/b') FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### 1 -PREHOOK: query: SELECT xpath_long ('1248', 'sum(a/*)') FROM src LIMIT 1 +PREHOOK: query: SELECT xpath_long ('1248', 'sum(a/*)') FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT xpath_long ('1248', 'sum(a/*)') FROM src LIMIT 1 +POSTHOOK: query: SELECT xpath_long ('1248', 'sum(a/*)') FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### 15 -PREHOOK: query: SELECT xpath_long ('1248', 'sum(a/b)') FROM src LIMIT 1 +PREHOOK: query: SELECT xpath_long ('1248', 'sum(a/b)') FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT xpath_long ('1248', 'sum(a/b)') FROM src LIMIT 1 +POSTHOOK: query: SELECT xpath_long ('1248', 'sum(a/b)') FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### 7 -PREHOOK: query: SELECT xpath_long ('1248', 'sum(a/b[@class="odd"])') FROM src LIMIT 1 +PREHOOK: query: SELECT xpath_long ('1248', 'sum(a/b[@class="odd"])') FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT xpath_long ('1248', 'sum(a/b[@class="odd"])') FROM src LIMIT 1 +POSTHOOK: query: SELECT xpath_long ('1248', 'sum(a/b[@class="odd"])') FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_xpath_short.q.out b/ql/src/test/results/clientpositive/udf_xpath_short.q.out index 
4ef5f25..ef1e962 100644 --- a/ql/src/test/results/clientpositive/udf_xpath_short.q.out +++ b/ql/src/test/results/clientpositive/udf_xpath_short.q.out @@ -11,74 +11,74 @@ xpath_short(xml, xpath) - Returns a short value that matches the xpath expressio Example: > SELECT xpath_short('12','sum(a/b)') FROM src LIMIT 1; 3 -PREHOOK: query: SELECT xpath_short ('this is not a number', 'a') FROM src LIMIT 1 +PREHOOK: query: SELECT xpath_short ('this is not a number', 'a') FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT xpath_short ('this is not a number', 'a') FROM src LIMIT 1 +POSTHOOK: query: SELECT xpath_short ('this is not a number', 'a') FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### 0 -PREHOOK: query: SELECT xpath_short ('this 2 is not a number', 'a') FROM src LIMIT 1 +PREHOOK: query: SELECT xpath_short ('this 2 is not a number', 'a') FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT xpath_short ('this 2 is not a number', 'a') FROM src LIMIT 1 +POSTHOOK: query: SELECT xpath_short ('this 2 is not a number', 'a') FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### 0 -PREHOOK: query: SELECT xpath_short ('200000000040000000000', 'a/b * a/c') FROM src LIMIT 1 +PREHOOK: query: SELECT xpath_short ('200000000040000000000', 'a/b * a/c') FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT xpath_short ('200000000040000000000', 'a/b * a/c') FROM src LIMIT 1 +POSTHOOK: query: SELECT xpath_short ('200000000040000000000', 'a/b * a/c') FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### -1 -PREHOOK: query: SELECT xpath_short ('try a boolean', 'a = 10') FROM src LIMIT 1 +PREHOOK: query: SELECT xpath_short ('try a boolean', 'a = 10') FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT xpath_short ('try a boolean', 'a = 10') FROM src LIMIT 1 +POSTHOOK: query: SELECT xpath_short ('try a boolean', 'a = 10') FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### 0 -PREHOOK: query: SELECT xpath_short ('1248', 'a/b') FROM src LIMIT 1 +PREHOOK: query: SELECT xpath_short ('1248', 'a/b') FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT xpath_short ('1248', 'a/b') FROM src LIMIT 1 +POSTHOOK: query: SELECT xpath_short ('1248', 'a/b') FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### 1 -PREHOOK: query: SELECT xpath_short ('1248', 'sum(a/*)') FROM src LIMIT 1 +PREHOOK: query: SELECT xpath_short ('1248', 'sum(a/*)') FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT xpath_short ('1248', 'sum(a/*)') FROM src LIMIT 1 +POSTHOOK: query: SELECT xpath_short ('1248', 'sum(a/*)') FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### 15 -PREHOOK: query: SELECT xpath_short ('1248', 'sum(a/b)') FROM src LIMIT 1 +PREHOOK: 
query: SELECT xpath_short ('1248', 'sum(a/b)') FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT xpath_short ('1248', 'sum(a/b)') FROM src LIMIT 1 +POSTHOOK: query: SELECT xpath_short ('1248', 'sum(a/b)') FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### 7 -PREHOOK: query: SELECT xpath_short ('1248', 'sum(a/b[@class="odd"])') FROM src LIMIT 1 +PREHOOK: query: SELECT xpath_short ('1248', 'sum(a/b[@class="odd"])') FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT xpath_short ('1248', 'sum(a/b[@class="odd"])') FROM src LIMIT 1 +POSTHOOK: query: SELECT xpath_short ('1248', 'sum(a/b[@class="odd"])') FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udf_xpath_string.q.out b/ql/src/test/results/clientpositive/udf_xpath_string.q.out index bd3d2af..a46ee9b 100644 --- a/ql/src/test/results/clientpositive/udf_xpath_string.q.out +++ b/ql/src/test/results/clientpositive/udf_xpath_string.q.out @@ -17,74 +17,74 @@ Example: 'b2' > SELECT xpath_string('b1b2','a') FROM src LIMIT 1; 'b1b2' -PREHOOK: query: SELECT xpath_string ('bbcc', 'a') FROM src LIMIT 1 +PREHOOK: query: SELECT xpath_string ('bbcc', 'a') FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT xpath_string ('bbcc', 'a') FROM src LIMIT 1 +POSTHOOK: query: SELECT xpath_string ('bbcc', 'a') FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### bbcc -PREHOOK: query: SELECT xpath_string ('bbcc', 'a/b') FROM src LIMIT 1 +PREHOOK: query: SELECT xpath_string ('bbcc', 'a/b') FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT xpath_string ('bbcc', 'a/b') FROM src LIMIT 1 +POSTHOOK: query: SELECT xpath_string ('bbcc', 'a/b') FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### bb -PREHOOK: query: SELECT xpath_string ('bbcc', 'a/c') FROM src LIMIT 1 +PREHOOK: query: SELECT xpath_string ('bbcc', 'a/c') FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT xpath_string ('bbcc', 'a/c') FROM src LIMIT 1 +POSTHOOK: query: SELECT xpath_string ('bbcc', 'a/c') FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### cc -PREHOOK: query: SELECT xpath_string ('bbcc', 'a/d') FROM src LIMIT 1 +PREHOOK: query: SELECT xpath_string ('bbcc', 'a/d') FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT xpath_string ('bbcc', 'a/d') FROM src LIMIT 1 +POSTHOOK: query: SELECT xpath_string ('bbcc', 'a/d') FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### -PREHOOK: query: SELECT xpath_string ('b1b2', '//b') FROM src LIMIT 1 +PREHOOK: query: SELECT xpath_string ('b1b2', '//b') FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT xpath_string 
('b1b2', '//b') FROM src LIMIT 1 +POSTHOOK: query: SELECT xpath_string ('b1b2', '//b') FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### b1 -PREHOOK: query: SELECT xpath_string ('b1b2', 'a/b[1]') FROM src LIMIT 1 +PREHOOK: query: SELECT xpath_string ('b1b2', 'a/b[1]') FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT xpath_string ('b1b2', 'a/b[1]') FROM src LIMIT 1 +POSTHOOK: query: SELECT xpath_string ('b1b2', 'a/b[1]') FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### b1 -PREHOOK: query: SELECT xpath_string ('b1b2', 'a/b[2]') FROM src LIMIT 1 +PREHOOK: query: SELECT xpath_string ('b1b2', 'a/b[2]') FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT xpath_string ('b1b2', 'a/b[2]') FROM src LIMIT 1 +POSTHOOK: query: SELECT xpath_string ('b1b2', 'a/b[2]') FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### b2 -PREHOOK: query: SELECT xpath_string ('b1b2', 'a/b[@id="b_2"]') FROM src LIMIT 1 +PREHOOK: query: SELECT xpath_string ('b1b2', 'a/b[@id="b_2"]') FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -POSTHOOK: query: SELECT xpath_string ('b1b2', 'a/b[@id="b_2"]') FROM src LIMIT 1 +POSTHOOK: query: SELECT xpath_string ('b1b2', 'a/b[@id="b_2"]') FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/udtf_json_tuple.q.out b/ql/src/test/results/clientpositive/udtf_json_tuple.q.out index 80f0725..bfb83a3 100644 --- a/ql/src/test/results/clientpositive/udtf_json_tuple.q.out +++ b/ql/src/test/results/clientpositive/udtf_json_tuple.q.out @@ -5,34 +5,34 @@ POSTHOOK: type: CREATETABLE POSTHOOK: Output: default@json_t PREHOOK: query: insert overwrite table json_t select * from ( - select '1', '{"f1": "value1", "f2": "value2", "f3": 3, "f5": 5.23}' from src limit 1 + select '1', '{"f1": "value1", "f2": "value2", "f3": 3, "f5": 5.23}' from src tablesample (1 rows) union all - select '2', '{"f1": "value12", "f3": "value3", "f2": 2, "f4": 4.01}' from src limit 1 + select '2', '{"f1": "value12", "f3": "value3", "f2": 2, "f4": 4.01}' from src tablesample (1 rows) union all - select '3', '{"f1": "value13", "f4": "value44", "f3": "value33", "f2": 2, "f5": 5.01}' from src limit 1 + select '3', '{"f1": "value13", "f4": "value44", "f3": "value33", "f2": 2, "f5": 5.01}' from src tablesample (1 rows) union all - select '4', cast(null as string) from src limit 1 + select '4', cast(null as string) from src tablesample (1 rows) union all - select '5', '{"f1": "", "f5": null}' from src limit 1 + select '5', '{"f1": "", "f5": null}' from src tablesample (1 rows) union all - select '6', '[invalid JSON string]' from src limit 1 + select '6', '[invalid JSON string]' from src tablesample (1 rows) ) s PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@json_t POSTHOOK: query: insert overwrite table json_t select * from ( - select '1', '{"f1": "value1", "f2": "value2", "f3": 3, "f5": 5.23}' from src limit 1 + select '1', '{"f1": "value1", "f2": "value2", "f3": 3, "f5": 5.23}' from src tablesample (1 rows) union all - select '2', '{"f1": "value12", "f3": 
"value3", "f2": 2, "f4": 4.01}' from src limit 1 + select '2', '{"f1": "value12", "f3": "value3", "f2": 2, "f4": 4.01}' from src tablesample (1 rows) union all - select '3', '{"f1": "value13", "f4": "value44", "f3": "value33", "f2": 2, "f5": 5.01}' from src limit 1 + select '3', '{"f1": "value13", "f4": "value44", "f3": "value33", "f2": 2, "f5": 5.01}' from src tablesample (1 rows) union all - select '4', cast(null as string) from src limit 1 + select '4', cast(null as string) from src tablesample (1 rows) union all - select '5', '{"f1": "", "f5": null}' from src limit 1 + select '5', '{"f1": "", "f5": null}' from src tablesample (1 rows) union all - select '6', '[invalid JSON string]' from src limit 1 + select '6', '[invalid JSON string]' from src tablesample (1 rows) ) s POSTHOOK: type: QUERY POSTHOOK: Input: default@src @@ -588,11 +588,11 @@ POSTHOOK: type: CREATETABLE POSTHOOK: Output: default@dest1 POSTHOOK: Lineage: json_t.jstring EXPRESSION [] POSTHOOK: Lineage: json_t.key EXPRESSION [] -PREHOOK: query: INSERT OVERWRITE TABLE dest1 SELECT '{"a":"b\nc"}' FROM src LIMIT 1 +PREHOOK: query: INSERT OVERWRITE TABLE dest1 SELECT '{"a":"b\nc"}' FROM src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@dest1 -POSTHOOK: query: INSERT OVERWRITE TABLE dest1 SELECT '{"a":"b\nc"}' FROM src LIMIT 1 +POSTHOOK: query: INSERT OVERWRITE TABLE dest1 SELECT '{"a":"b\nc"}' FROM src tablesample (1 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@dest1 diff --git a/ql/src/test/results/clientpositive/udtf_parse_url_tuple.q.out b/ql/src/test/results/clientpositive/udtf_parse_url_tuple.q.out index 7fbd74d..3894e33 100644 --- a/ql/src/test/results/clientpositive/udtf_parse_url_tuple.q.out +++ b/ql/src/test/results/clientpositive/udtf_parse_url_tuple.q.out @@ -5,34 +5,34 @@ POSTHOOK: type: CREATETABLE POSTHOOK: Output: default@url_t PREHOOK: query: insert overwrite table url_t select * from ( - select '1', 'http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1' from src limit 1 + select '1', 'http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1' from src tablesample (1 rows) union all - select '2', 'https://www.socs.uts.edu.au:80/MosaicDocs-old/url-primer.html?k1=tps#chapter1' from src limit 1 + select '2', 'https://www.socs.uts.edu.au:80/MosaicDocs-old/url-primer.html?k1=tps#chapter1' from src tablesample (1 rows) union all - select '3', 'ftp://sites.google.com/a/example.com/site/page' from src limit 1 + select '3', 'ftp://sites.google.com/a/example.com/site/page' from src tablesample (1 rows) union all - select '4', cast(null as string) from src limit 1 + select '4', cast(null as string) from src tablesample (1 rows) union all - select '5', 'htttp://' from src limit 1 + select '5', 'htttp://' from src tablesample (1 rows) union all - select '6', '[invalid url string]' from src limit 1 + select '6', '[invalid url string]' from src tablesample (1 rows) ) s PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@url_t POSTHOOK: query: insert overwrite table url_t select * from ( - select '1', 'http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1' from src limit 1 + select '1', 'http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1' from src tablesample (1 rows) union all - select '2', 'https://www.socs.uts.edu.au:80/MosaicDocs-old/url-primer.html?k1=tps#chapter1' from src limit 1 + select '2', 'https://www.socs.uts.edu.au:80/MosaicDocs-old/url-primer.html?k1=tps#chapter1' from src tablesample (1 rows) union all - select '3', 
'ftp://sites.google.com/a/example.com/site/page' from src limit 1 + select '3', 'ftp://sites.google.com/a/example.com/site/page' from src tablesample (1 rows) union all - select '4', cast(null as string) from src limit 1 + select '4', cast(null as string) from src tablesample (1 rows) union all - select '5', 'htttp://' from src limit 1 + select '5', 'htttp://' from src tablesample (1 rows) union all - select '6', '[invalid url string]' from src limit 1 + select '6', '[invalid url string]' from src tablesample (1 rows) ) s POSTHOOK: type: QUERY POSTHOOK: Input: default@src diff --git a/ql/src/test/results/clientpositive/union34.q.out b/ql/src/test/results/clientpositive/union34.q.out index 4d3ba52..18ac92f 100644 --- a/ql/src/test/results/clientpositive/union34.q.out +++ b/ql/src/test/results/clientpositive/union34.q.out @@ -18,7 +18,7 @@ PREHOOK: type: CREATETABLE POSTHOOK: query: create table src10_4 (key string, value string) POSTHOOK: type: CREATETABLE POSTHOOK: Output: default@src10_4 -PREHOOK: query: from (select * from src limit 10) a +PREHOOK: query: from (select * from src tablesample (10 rows)) a insert overwrite table src10_1 select * insert overwrite table src10_2 select * insert overwrite table src10_3 select * @@ -29,7 +29,7 @@ PREHOOK: Output: default@src10_1 PREHOOK: Output: default@src10_2 PREHOOK: Output: default@src10_3 PREHOOK: Output: default@src10_4 -POSTHOOK: query: from (select * from src limit 10) a +POSTHOOK: query: from (select * from src tablesample (10 rows)) a insert overwrite table src10_1 select * insert overwrite table src10_2 select * insert overwrite table src10_3 select *
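Throughout these golden files the row-count form of TABLESAMPLE is attached to the table reference itself, so each query keeps its small, fixed input without a trailing LIMIT clause; in the union34 case above a single sampled subquery feeds a FROM-first multi-insert. A minimal sketch of that shape, with hypothetical target tables t1 and t2:
  from (select * from src tablesample (10 rows)) s
  insert overwrite table t1 select *
  insert overwrite table t2 select *;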