diff --git hbase-handler/src/test/results/negative/cascade_dbdrop.q.out hbase-handler/src/test/results/negative/cascade_dbdrop.q.out
index c29337d..e26f47b 100644
--- hbase-handler/src/test/results/negative/cascade_dbdrop.q.out
+++ hbase-handler/src/test/results/negative/cascade_dbdrop.q.out
@@ -17,6 +17,7 @@ STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
 WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key,cf:string")
 TBLPROPERTIES ("hbase.table.name" = "hbase_table_0")
 PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:hbasedb
 POSTHOOK: query: -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20, 0.20S)
 -- Hadoop 0.23 changes the behavior FsShell on Exit Codes
 -- In Hadoop 0.20
@@ -32,6 +33,7 @@ STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
 WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key,cf:string")
 TBLPROPERTIES ("hbase.table.name" = "hbase_table_0")
 POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:hbasedb
 POSTHOOK: Output: hbaseDB@hbase_table_0
 Found 3 items
 #### A masked pattern was here ####
@@ -39,9 +41,11 @@ PREHOOK: query: DROP DATABASE IF EXISTS hbaseDB CASCADE
 PREHOOK: type: DROPDATABASE
 PREHOOK: Input: database:hbasedb
 PREHOOK: Output: database:hbasedb
+PREHOOK: Output: hbasedb@hbase_table_0
 POSTHOOK: query: DROP DATABASE IF EXISTS hbaseDB CASCADE
 POSTHOOK: type: DROPDATABASE
 POSTHOOK: Input: database:hbasedb
 POSTHOOK: Output: database:hbasedb
+POSTHOOK: Output: hbasedb@hbase_table_0
 Command failed with exit code = 1
 Query returned non-zero code: 1, cause: null
diff --git ql/src/test/results/clientpositive/combine2.q.out ql/src/test/results/clientpositive/combine2.q.out
index 20bc062..7c74f27 100644
--- ql/src/test/results/clientpositive/combine2.q.out
+++ ql/src/test/results/clientpositive/combine2.q.out
@@ -9,6 +9,7 @@ PREHOOK: query: -- EXCLUDE_OS_WINDOWS
 create table combine2(key string) partitioned by (value string)
 PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
 POSTHOOK: query: -- EXCLUDE_OS_WINDOWS
 -- excluded on windows because of difference in file name encoding logic
@@ -16,6 +17,7 @@ POSTHOOK: query: -- EXCLUDE_OS_WINDOWS
 create table combine2(key string) partitioned by (value string)
 POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@combine2
 PREHOOK: query: -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20, 0.20S)
 -- This test sets mapred.max.split.size=256 and hive.merge.smallfiles.avgsize=0
@@ -70,8 +72,10 @@ POSTHOOK: Lineage: combine2 PARTITION(value=val_9).key EXPRESSION [(src)src.Fiel
 POSTHOOK: Lineage: combine2 PARTITION(value=|).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
 PREHOOK: query: show partitions combine2
 PREHOOK: type: SHOWPARTITIONS
+PREHOOK: Input: default@combine2
 POSTHOOK: query: show partitions combine2
 POSTHOOK: type: SHOWPARTITIONS
+POSTHOOK: Input: default@combine2
 POSTHOOK: Lineage: combine2 PARTITION(value=2010-04-21 09:45:00).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: combine2 PARTITION(value=val_0).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: combine2 PARTITION(value=val_2).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
@@ -256,12 +260,14 @@ STAGE PLANS:
               COLUMN_STATS_ACCURATE true
               bucket_count -1
               columns key
+              columns.comments 
               columns.types string
 #### A masked pattern was here ####
               name default.combine2
               numFiles 1
               numRows 1
               partition_columns value
+              partition_columns.types string
               rawDataSize 2
               serialization.ddl struct combine2 { string key}
               serialization.format 1
@@ -275,10 +281,12 @@ STAGE PLANS:
             properties:
               bucket_count -1
               columns key
+              columns.comments 
               columns.types string
 #### A masked pattern was here ####
               name default.combine2
               partition_columns value
+              partition_columns.types string
               serialization.ddl struct combine2 { string key}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -297,12 +305,14 @@ STAGE PLANS:
               COLUMN_STATS_ACCURATE true
               bucket_count -1
               columns key
+              columns.comments 
               columns.types string
 #### A masked pattern was here ####
               name default.combine2
-              numFiles 3
+              numFiles 1
               numRows 3
               partition_columns value
+              partition_columns.types string
               rawDataSize 3
               serialization.ddl struct combine2 { string key}
               serialization.format 1
@@ -316,10 +326,12 @@ STAGE PLANS:
             properties:
               bucket_count -1
               columns key
+              columns.comments 
               columns.types string
 #### A masked pattern was here ####
               name default.combine2
               partition_columns value
+              partition_columns.types string
               serialization.ddl struct combine2 { string key}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -338,12 +350,14 @@ STAGE PLANS:
               COLUMN_STATS_ACCURATE true
               bucket_count -1
               columns key
+              columns.comments 
               columns.types string
 #### A masked pattern was here ####
               name default.combine2
               numFiles 1
               numRows 1
               partition_columns value
+              partition_columns.types string
               rawDataSize 1
               serialization.ddl struct combine2 { string key}
               serialization.format 1
@@ -357,10 +371,12 @@ STAGE PLANS:
             properties:
               bucket_count -1
               columns key
+              columns.comments 
               columns.types string
 #### A masked pattern was here ####
               name default.combine2
               partition_columns value
+              partition_columns.types string
               serialization.ddl struct combine2 { string key}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -379,12 +395,14 @@ STAGE PLANS:
               COLUMN_STATS_ACCURATE true
               bucket_count -1
               columns key
+              columns.comments 
               columns.types string
 #### A masked pattern was here ####
               name default.combine2
               numFiles 1
               numRows 1
               partition_columns value
+              partition_columns.types string
               rawDataSize 1
               serialization.ddl struct combine2 { string key}
               serialization.format 1
@@ -398,10 +416,12 @@ STAGE PLANS:
             properties:
               bucket_count -1
               columns key
+              columns.comments 
               columns.types string
 #### A masked pattern was here ####
               name default.combine2
               partition_columns value
+              partition_columns.types string
               serialization.ddl struct combine2 { string key}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -420,12 +440,14 @@ STAGE PLANS:
               COLUMN_STATS_ACCURATE true
               bucket_count -1
               columns key
+              columns.comments 
               columns.types string
 #### A masked pattern was here ####
               name default.combine2
-              numFiles 3
+              numFiles 1
               numRows 3
               partition_columns value
+              partition_columns.types string
               rawDataSize 3
               serialization.ddl struct combine2 { string key}
               serialization.format 1
@@ -439,10 +461,12 @@ STAGE PLANS:
             properties:
               bucket_count -1
               columns key
+              columns.comments 
               columns.types string
 #### A masked pattern was here ####
               name default.combine2
               partition_columns value
+              partition_columns.types string
               serialization.ddl struct combine2 { string key}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -461,12 +485,14 @@ STAGE PLANS:
               COLUMN_STATS_ACCURATE true
               bucket_count -1
               columns key
+              columns.comments 
               columns.types string
 #### A masked pattern was here ####
               name default.combine2
               numFiles 1
               numRows 1
               partition_columns value
+              partition_columns.types string
               rawDataSize 1
               serialization.ddl struct combine2 { string key}
               serialization.format 1
@@ -480,10 +506,12 @@ STAGE PLANS:
             properties:
               bucket_count -1
               columns key
+              columns.comments 
               columns.types string
 #### A masked pattern was here ####
               name default.combine2
               partition_columns value
+              partition_columns.types string
               serialization.ddl struct combine2 { string key}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -502,12 +530,14 @@ STAGE PLANS:
               COLUMN_STATS_ACCURATE true
               bucket_count -1
               columns key
+              columns.comments 
               columns.types string
 #### A masked pattern was here ####
               name default.combine2
               numFiles 1
               numRows 1
               partition_columns value
+              partition_columns.types string
               rawDataSize 1
               serialization.ddl struct combine2 { string key}
               serialization.format 1
@@ -521,10 +551,12 @@ STAGE PLANS:
             properties:
               bucket_count -1
               columns key
+              columns.comments 
               columns.types string
 #### A masked pattern was here ####
               name default.combine2
               partition_columns value
+              partition_columns.types string
               serialization.ddl struct combine2 { string key}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -543,12 +575,14 @@ STAGE PLANS:
               COLUMN_STATS_ACCURATE true
               bucket_count -1
               columns key
+              columns.comments 
               columns.types string
 #### A masked pattern was here ####
               name default.combine2
               numFiles 1
               numRows 1
               partition_columns value
+              partition_columns.types string
               rawDataSize 2
               serialization.ddl struct combine2 { string key}
               serialization.format 1
@@ -562,10 +596,12 @@ STAGE PLANS:
             properties:
               bucket_count -1
               columns key
+              columns.comments 
               columns.types string
 #### A masked pattern was here ####
               name default.combine2
               partition_columns value
+              partition_columns.types string
               serialization.ddl struct combine2 { string key}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
diff --git ql/src/test/results/clientpositive/recursive_dir.q.out ql/src/test/results/clientpositive/recursive_dir.q.out
index 1baf1fa..62ae6e5 100644
--- ql/src/test/results/clientpositive/recursive_dir.q.out
+++ ql/src/test/results/clientpositive/recursive_dir.q.out
@@ -2,17 +2,23 @@ PREHOOK: query: -- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
 CREATE TABLE fact_daily(x int)
 PARTITIONED BY (ds STRING)
 PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
 POSTHOOK: query: -- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
 CREATE TABLE fact_daily(x int)
 PARTITIONED BY (ds STRING)
 POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@fact_daily
 PREHOOK: query: CREATE TABLE fact_tz(x int) PARTITIONED BY (ds STRING, hr STRING)
 #### A masked pattern was here ####
 PREHOOK: type: CREATETABLE
+#### A masked pattern was here ####
+PREHOOK: Output: database:default
 POSTHOOK: query: CREATE TABLE fact_tz(x int) PARTITIONED BY (ds STRING, hr STRING)
 #### A masked pattern was here ####
 POSTHOOK: type: CREATETABLE
+#### A masked pattern was here ####
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@fact_tz
 PREHOOK: query: INSERT OVERWRITE TABLE fact_tz PARTITION (ds='1', hr='1')
 SELECT key+11 FROM src WHERE key=484
@@ -37,11 +43,13 @@ POSTHOOK: Lineage: fact_tz PARTITION(ds=1,hr=1).x EXPRESSION [(src)src.FieldSche
 PREHOOK: query: ALTER TABLE fact_daily ADD PARTITION (ds='1')
 #### A masked pattern was here ####
 PREHOOK: type: ALTERTABLE_ADDPARTS
-PREHOOK: Input: default@fact_daily
+#### A masked pattern was here ####
+PREHOOK: Output: default@fact_daily
 POSTHOOK: query: ALTER TABLE fact_daily ADD PARTITION (ds='1')
 #### A masked pattern was here ####
 POSTHOOK: type: ALTERTABLE_ADDPARTS
-POSTHOOK: Input: default@fact_daily
+#### A masked pattern was here ####
+POSTHOOK: Output: default@fact_daily
 POSTHOOK: Output: default@fact_daily@ds=1
 POSTHOOK: Lineage: fact_tz PARTITION(ds=1,hr=1).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 PREHOOK: query: SELECT * FROM fact_daily WHERE ds='1'
diff --git ql/src/test/results/clientpositive/sample_islocalmode_hook.q.out ql/src/test/results/clientpositive/sample_islocalmode_hook.q.out
index b3cce15..edc1f07 100644
--- ql/src/test/results/clientpositive/sample_islocalmode_hook.q.out
+++ ql/src/test/results/clientpositive/sample_islocalmode_hook.q.out
@@ -3,11 +3,13 @@ PREHOOK: query: -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20, 0.20S)
 -- create file inputs
 create table sih_i_part (key int, value string) partitioned by (p string)
 PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
 POSTHOOK: query: -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20, 0.20S)
 -- create file inputs
 create table sih_i_part (key int, value string) partitioned by (p string)
 POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@sih_i_part
 PREHOOK: query: insert overwrite table sih_i_part partition (p='1')
 select key, value from src
 PREHOOK: type: QUERY
diff --git ql/src/test/results/clientpositive/skewjoin_union_remove_1.q.out ql/src/test/results/clientpositive/skewjoin_union_remove_1.q.out
index 2a267c7..d3c0d8d 100644
--- ql/src/test/results/clientpositive/skewjoin_union_remove_1.q.out
+++ ql/src/test/results/clientpositive/skewjoin_union_remove_1.q.out
@@ -9,6 +9,7 @@ PREHOOK: query: -- This is to test the union->selectstar->filesink and skewjoin
 CREATE TABLE T1(key STRING, val STRING)
 SKEWED BY (key) ON ((2)) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
 POSTHOOK: query: -- This is to test the union->selectstar->filesink and skewjoin optimization
 -- Union of 2 map-reduce subqueries is performed for the skew join
 -- There is no need to write the temporary results of the sub-queries, and then read them
@@ -20,25 +21,32 @@ POSTHOOK: query: -- This is to test the union->selectstar->filesink and skewjoin
 CREATE TABLE T1(key STRING, val STRING)
 SKEWED BY (key) ON ((2)) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@T1
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
 PREHOOK: type: LOAD
+#### A masked pattern was here ####
 PREHOOK: Output: default@t1
 POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
 POSTHOOK: type: LOAD
+#### A masked pattern was here ####
 POSTHOOK: Output: default@t1
 PREHOOK: query: CREATE TABLE T2(key STRING, val STRING)
 SKEWED BY (key) ON ((3)) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
 POSTHOOK: query: CREATE TABLE T2(key STRING, val STRING)
 SKEWED BY (key) ON ((3)) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@T2
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2
 PREHOOK: type: LOAD
+#### A masked pattern was here ####
 PREHOOK: Output: default@t2
 POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2
 POSTHOOK: type: LOAD
+#### A masked pattern was here ####
 POSTHOOK: Output: default@t2
 PREHOOK: query: -- a simple join query with skew on both the tables on the join key
@@ -312,8 +320,10 @@ NULL NULL 5 15
 8 28 8 18
 PREHOOK: query: create table DEST1(key1 STRING, val1 STRING, key2 STRING, val2 STRING)
 PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
 POSTHOOK: query: create table DEST1(key1 STRING, val1 STRING, key2 STRING, val2 STRING)
 POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@DEST1
 PREHOOK: query: EXPLAIN
 INSERT OVERWRITE TABLE DEST1
diff --git ql/src/test/results/clientpositive/skewjoin_union_remove_2.q.out ql/src/test/results/clientpositive/skewjoin_union_remove_2.q.out
index 5c92675..239389f 100644
--- ql/src/test/results/clientpositive/skewjoin_union_remove_2.q.out
+++ ql/src/test/results/clientpositive/skewjoin_union_remove_2.q.out
@@ -1,39 +1,51 @@
 PREHOOK: query: CREATE TABLE T1(key STRING, val STRING)
 SKEWED BY (key) ON ((2), (8)) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
 POSTHOOK: query: CREATE TABLE T1(key STRING, val STRING)
 SKEWED BY (key) ON ((2), (8)) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@T1
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
 PREHOOK: type: LOAD
+#### A masked pattern was here ####
 PREHOOK: Output: default@t1
 POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
 POSTHOOK: type: LOAD
+#### A masked pattern was here ####
 POSTHOOK: Output: default@t1
 PREHOOK: query: CREATE TABLE T2(key STRING, val STRING)
 SKEWED BY (key) ON ((3), (8)) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
 POSTHOOK: query: CREATE TABLE T2(key STRING, val STRING)
 SKEWED BY (key) ON ((3), (8)) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@T2
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2
 PREHOOK: type: LOAD
+#### A masked pattern was here ####
 PREHOOK: Output: default@t2
 POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2
 POSTHOOK: type: LOAD
+#### A masked pattern was here ####
 POSTHOOK: Output: default@t2
 PREHOOK: query: CREATE TABLE T3(key STRING, val STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
 POSTHOOK: query: CREATE TABLE T3(key STRING, val STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@T3
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T3.txt' INTO TABLE T3
 PREHOOK: type: LOAD
+#### A masked pattern was here ####
 PREHOOK: Output: default@t3
 POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T3.txt' INTO TABLE T3
 POSTHOOK: type: LOAD
+#### A masked pattern was here ####
 POSTHOOK: Output: default@t3
 PREHOOK: query: -- This is to test the union->selectstar->filesink and skewjoin optimization
 -- Union of 3 map-reduce subqueries is performed for the skew join
diff --git ql/src/test/results/clientpositive/udaf_percentile_approx_23.q.out ql/src/test/results/clientpositive/udaf_percentile_approx_23.q.out
index a63846a..6288bd5 100644
--- ql/src/test/results/clientpositive/udaf_percentile_approx_23.q.out
+++ ql/src/test/results/clientpositive/udaf_percentile_approx_23.q.out
@@ -3,95 +3,129 @@ PREHOOK: query: -- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
 CREATE TABLE bucket (key double, value string) CLUSTERED BY (key) SORTED BY (key DESC) INTO 4 BUCKETS STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
 POSTHOOK: query: -- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
 -- 0.23 changed input order of data in reducer task, which affects result of percentile_approx
 CREATE TABLE bucket (key double, value string) CLUSTERED BY (key) SORTED BY (key DESC) INTO 4 BUCKETS STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@bucket
 PREHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket
 PREHOOK: type: LOAD
+#### A masked pattern was here ####
 PREHOOK: Output: default@bucket
 POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket
 POSTHOOK: type: LOAD
+#### A masked pattern was here ####
 POSTHOOK: Output: default@bucket
 PREHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket
 PREHOOK: type: LOAD
+#### A masked pattern was here ####
 PREHOOK: Output: default@bucket
 POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket
 POSTHOOK: type: LOAD
+#### A masked pattern was here ####
 POSTHOOK: Output: default@bucket
 PREHOOK: query: load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket
 PREHOOK: type: LOAD
+#### A masked pattern was here ####
 PREHOOK: Output: default@bucket
 POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket
 POSTHOOK: type: LOAD
+#### A masked pattern was here ####
 POSTHOOK: Output: default@bucket
 PREHOOK: query: load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket
 PREHOOK: type: LOAD
+#### A masked pattern was here ####
 PREHOOK: Output: default@bucket
 POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket
 POSTHOOK: type: LOAD
+#### A masked pattern was here ####
 POSTHOOK: Output: default@bucket
 PREHOOK: query: create table t1 (result double)
 PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
 POSTHOOK: query: create table t1 (result double)
 POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@t1
 PREHOOK: query: create table t2 (result double)
 PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
 POSTHOOK: query: create table t2 (result double)
 POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@t2
 PREHOOK: query: create table t3 (result double)
 PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
 POSTHOOK: query: create table t3 (result double)
 POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@t3
 PREHOOK: query: create table t4 (result double)
 PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
 POSTHOOK: query: create table t4 (result double)
 POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@t4
 PREHOOK: query: create table t5 (result double)
 PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
 POSTHOOK: query: create table t5 (result double)
 POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@t5
 PREHOOK: query: create table t6 (result double)
 PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
 POSTHOOK: query: create table t6 (result double)
 POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@t6
 PREHOOK: query: create table t7 (result array<double>)
 PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
 POSTHOOK: query: create table t7 (result array<double>)
 POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@t7
 PREHOOK: query: create table t8 (result array<double>)
 PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
 POSTHOOK: query: create table t8 (result array<double>)
 POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@t8
 PREHOOK: query: create table t9 (result array<double>)
 PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
 POSTHOOK: query: create table t9 (result array<double>)
 POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@t9
 PREHOOK: query: create table t10 (result array<double>)
 PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
 POSTHOOK: query: create table t10 (result array<double>)
 POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@t10
 PREHOOK: query: create table t11 (result array<double>)
 PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
 POSTHOOK: query: create table t11 (result array<double>)
 POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@t11
 PREHOOK: query: create table t12 (result array<double>)
 PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
 POSTHOOK: query: create table t12 (result array<double>)
 POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@t12
 PREHOOK: query: -- disable map-side aggregation
 FROM bucket