diff --git a/ql/src/test/queries/clientpositive/stats_based_fetch_decision.q b/ql/src/test/queries/clientpositive/stats_based_fetch_decision.q
index c66cafc..0e1ffff 100644
--- a/ql/src/test/queries/clientpositive/stats_based_fetch_decision.q
+++ b/ql/src/test/queries/clientpositive/stats_based_fetch_decision.q
@@ -1,15 +1,13 @@
 SET hive.fetch.task.conversion=more;
 SET hive.explain.user=false;
--- will not print tez counters as tasks will not be launched
 select * from src where key is null;
 select * from srcpart where key is null;
 explain select * from src where key is null;
-explain select * from srcpart where key is null;
+explain select key,value from srcpart where key is null;
 
 SET hive.fetch.task.conversion.threshold=1000;
--- will print tez counters as tasks will be launched
 select * from src where key is null;
 select * from srcpart where key is null;
 explain select * from src where key is null;
-explain select * from srcpart where key is null;
+explain select key,value from srcpart where key is null;
 
diff --git a/ql/src/test/results/clientnegative/exchange_partition_neg_incomplete_partition.q.out b/ql/src/test/results/clientnegative/exchange_partition_neg_incomplete_partition.q.out
index f4c06f9..8f7bbfa 100644
--- a/ql/src/test/results/clientnegative/exchange_partition_neg_incomplete_partition.q.out
+++ b/ql/src/test/results/clientnegative/exchange_partition_neg_incomplete_partition.q.out
@@ -54,4 +54,4 @@ POSTHOOK: type: SHOWPARTITIONS
 POSTHOOK: Input: default@exchange_part_test2
 ds=2013-04-05/hr=h1
 ds=2013-04-05/hr=h2
-FAILED: SemanticException [Error 10234]: Parition values specifed are not continuous. A subpartition value is specified without specififying the parent partition's value {hr=h1}
+FAILED: SemanticException [Error 10234]: Partition values specified are not continuous. A subpartition value is specified without specifying the parent partition's value {hr=h1}
diff --git a/ql/src/test/results/clientnegative/exim_00_unsupported_schema.q.out b/ql/src/test/results/clientnegative/exim_00_unsupported_schema.q.out
index dbcf6f4..b582471 100644
--- a/ql/src/test/results/clientnegative/exim_00_unsupported_schema.q.out
+++ b/ql/src/test/results/clientnegative/exim_00_unsupported_schema.q.out
@@ -19,4 +19,4 @@ POSTHOOK: type: LOAD
 #### A masked pattern was here ####
 POSTHOOK: Output: default@exim_department
 #### A masked pattern was here ####
-FAILED: SemanticException [Error 10320]: Error while peforming IO operation : No FileSystem for scheme: nosuchschema
+FAILED: SemanticException [Error 10320]: Error while performing IO operation : No FileSystem for scheme: nosuchschema
diff --git a/ql/src/test/results/clientpositive/llap/stats_based_fetch_decision.q.out b/ql/src/test/results/clientpositive/llap/stats_based_fetch_decision.q.out
index f61483b..6575452 100644
--- a/ql/src/test/results/clientpositive/llap/stats_based_fetch_decision.q.out
+++ b/ql/src/test/results/clientpositive/llap/stats_based_fetch_decision.q.out
@@ -1,10 +1,8 @@
-PREHOOK: query: -- will not print tez counters as tasks will not be launched
-select * from src where key is null
+PREHOOK: query: select * from src where key is null
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 #### A masked pattern was here ####
-POSTHOOK: query: -- will not print tez counters as tasks will not be launched
-select * from src where key is null
+POSTHOOK: query: select * from src where key is null
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 #### A masked pattern was here ####
@@ -45,9 +43,9 @@ STAGE PLANS:
               outputColumnNames: _col0, _col1
               ListSink
 
-PREHOOK: query: explain select * from srcpart where key is null
+PREHOOK: query: explain select key,value from srcpart where key is null
 PREHOOK: type: QUERY
-POSTHOOK: query: explain select * from srcpart where key is null
+POSTHOOK: query: explain select key,value from srcpart where key is null
 POSTHOOK: type: QUERY
 STAGE DEPENDENCIES:
   Stage-0 is a root stage
@@ -62,17 +60,15 @@ STAGE PLANS:
           Filter Operator
             predicate: key is null (type: boolean)
             Select Operator
-              expressions: null (type: string), value (type: string), ds (type: string), hr (type: string)
-              outputColumnNames: _col0, _col1, _col2, _col3
+              expressions: null (type: string), value (type: string)
+              outputColumnNames: _col0, _col1
               ListSink
 
-PREHOOK: query: -- will print tez counters as tasks will be launched
-select * from src where key is null
+PREHOOK: query: select * from src where key is null
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 #### A masked pattern was here ####
-POSTHOOK: query: -- will print tez counters as tasks will be launched
-select * from src where key is null
+POSTHOOK: query: select * from src where key is null
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 #### A masked pattern was here ####
@@ -133,9 +129,9 @@ STAGE PLANS:
       Processor Tree:
         ListSink
 
-PREHOOK: query: explain select * from srcpart where key is null
+PREHOOK: query: explain select key,value from srcpart where key is null
 PREHOOK: type: QUERY
-POSTHOOK: query: explain select * from srcpart where key is null
+POSTHOOK: query: explain select key,value from srcpart where key is null
 POSTHOOK: type: QUERY
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
@@ -150,17 +146,17 @@ STAGE PLANS:
     Map Operator Tree:
         TableScan
           alias: srcpart
-          Statistics: Num rows: 2000 Data size: 1092000 Basic stats: COMPLETE Column stats: COMPLETE
+          Statistics: Num rows: 2000 Data size: 356000 Basic stats: COMPLETE Column stats: COMPLETE
           Filter Operator
             predicate: key is null (type: boolean)
-            Statistics: Num rows: 1 Data size: 546 Basic stats: COMPLETE Column stats: COMPLETE
+            Statistics: Num rows: 1 Data size: 178 Basic stats: COMPLETE Column stats: COMPLETE
             Select Operator
-              expressions: null (type: string), value (type: string), ds (type: string), hr (type: string)
-              outputColumnNames: _col0, _col1, _col2, _col3
-              Statistics: Num rows: 1 Data size: 543 Basic stats: COMPLETE Column stats: COMPLETE
+              expressions: null (type: string), value (type: string)
+              outputColumnNames: _col0, _col1
+              Statistics: Num rows: 1 Data size: 175 Basic stats: COMPLETE Column stats: COMPLETE
               File Output Operator
                 compressed: false
-                Statistics: Num rows: 1 Data size: 543 Basic stats: COMPLETE Column stats: COMPLETE
+                Statistics: Num rows: 1 Data size: 175 Basic stats: COMPLETE Column stats: COMPLETE
                 table:
                     input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
diff --git a/ql/src/test/results/clientpositive/str_to_map.q.out b/ql/src/test/results/clientpositive/str_to_map.q.out
index 6afe1c8..002e4fb 100644
--- a/ql/src/test/results/clientpositive/str_to_map.q.out
+++ b/ql/src/test/results/clientpositive/str_to_map.q.out
@@ -8,7 +8,7 @@ PREHOOK: type: DESCFUNCTION
 POSTHOOK: query: desc function extended str_to_map
 POSTHOOK: type: DESCFUNCTION
 str_to_map(text, delimiter1, delimiter2) - Creates a map by parsing text
-Split text into key-value pairs using two delimiters. The first delimiter seperates pairs, and the second delimiter sperates key and value. If only one parameter is given, default delimiters are used: ',' as delimiter1 and '=' as delimiter2.
+Split text into key-value pairs using two delimiters. The first delimiter separates pairs, and the second delimiter sperates key and value. If only one parameter is given, default delimiters are used: ',' as delimiter1 and '=' as delimiter2.
 Function class:org.apache.hadoop.hive.ql.udf.generic.GenericUDFStringToMap
 Function type:BUILTIN
 PREHOOK: query: explain select str_to_map('a=1,b=2,c=3',',','=')['a'] from src limit 3