diff --git ql/src/test/queries/clientpositive/acid_vectorization_original.q ql/src/test/queries/clientpositive/acid_vectorization_original.q
index ddf138decb..0b91f6901a 100644
--- ql/src/test/queries/clientpositive/acid_vectorization_original.q
+++ ql/src/test/queries/clientpositive/acid_vectorization_original.q
@@ -4,8 +4,6 @@ set hive.txn.manager=org.apache.hadoop.hive.ql.lockmgr.DbTxnManager;
 set hive.exec.dynamic.partition.mode=nonstrict;
 set hive.vectorized.execution.enabled=true;

--- enables vectorizaiton of VirtualColumn.ROWID
-set hive.vectorized.row.identifier.enabled=true;
 -- enable ppd
 set hive.optimize.index.filter=true;

@@ -60,13 +58,12 @@ CREATE TABLE over10k_orc_bucketed(t tinyint,
 -- this produces about 250 distinct values across all 4 equivalence classes
 select distinct si, si%4 from over10k order by si;

--- explain insert into over10k_orc_bucketed select * from over10k cluster by si;
--- w/o "cluster by" all data is written to 000000_0
-insert into over10k_orc_bucketed select * from over10k cluster by si;
+-- explain insert into over10k_orc_bucketed select * from over10k;
+insert into over10k_orc_bucketed select * from over10k;

 dfs -ls ${hiveconf:hive.metastore.warehouse.dir}/over10k_orc_bucketed;

 -- create copy_N files
-insert into over10k_orc_bucketed select * from over10k cluster by si;
+insert into over10k_orc_bucketed select * from over10k;
 -- this output of this is masked in .out - it is visible in .orig
 dfs -ls ${hiveconf:hive.metastore.warehouse.dir}/over10k_orc_bucketed;
diff --git ql/src/test/queries/clientpositive/acid_vectorization_original_tez.q ql/src/test/queries/clientpositive/acid_vectorization_original_tez.q
index 4d9366210b..00a3ab2ce5 100644
--- ql/src/test/queries/clientpositive/acid_vectorization_original_tez.q
+++ ql/src/test/queries/clientpositive/acid_vectorization_original_tez.q
@@ -4,8 +4,6 @@ set hive.txn.manager=org.apache.hadoop.hive.ql.lockmgr.DbTxnManager;
 set hive.exec.dynamic.partition.mode=nonstrict;
 set hive.vectorized.execution.enabled=true;

--- enables vectorizaiton of VirtualColumn.ROWID
-set hive.vectorized.row.identifier.enabled=true;
 -- enable ppd
 set hive.optimize.index.filter=true;

@@ -60,13 +58,12 @@ CREATE TABLE over10k_orc_bucketed(t tinyint,
 -- this produces about 250 distinct values across all 4 equivalence classes
 select distinct si, si%4 from over10k order by si;

--- explain insert into over10k_orc_bucketed select * from over10k cluster by si;
--- w/o "cluster by" all data is written to 000000_0
-insert into over10k_orc_bucketed select * from over10k cluster by si;
+-- explain insert into over10k_orc_bucketed select * from over10k;
+insert into over10k_orc_bucketed select * from over10k;

 dfs -ls ${hiveconf:hive.metastore.warehouse.dir}/over10k_orc_bucketed;

 -- create copy_N files
-insert into over10k_orc_bucketed select * from over10k cluster by si;
+insert into over10k_orc_bucketed select * from over10k;
 -- this output of this is masked in .out - it is visible in .orig
 dfs -ls ${hiveconf:hive.metastore.warehouse.dir}/over10k_orc_bucketed;
@@ -109,7 +106,7 @@ explain select ROW__ID, count(*) from over10k_orc_bucketed group by ROW__ID havi
 select ROW__ID, count(*) from over10k_orc_bucketed group by ROW__ID having count(*) > 1;

 -- schedule compactor
-alter table over10k_orc_bucketed compact 'major' WITH OVERWRITE TBLPROPERTIES ("compactor.mapreduce.map.memory.mb"="500","compactor.hive.tez.container.size"="500");;
+alter table over10k_orc_bucketed compact 'major' WITH OVERWRITE TBLPROPERTIES ('compactor.mapreduce.map.memory.mb'='500', 'compactor.mapreduce.reduce.memory.mb'='500','compactor.mapreduce.map.memory.mb'='500', 'compactor.hive.tez.container.size'='500');

 -- run compactor - this currently fails with
diff --git ql/src/test/results/clientpositive/llap/acid_vectorization_original.q.out ql/src/test/results/clientpositive/llap/acid_vectorization_original.q.out
index 1a2aff7bf3..f7022b58fa 100644
--- ql/src/test/results/clientpositive/llap/acid_vectorization_original.q.out
+++ ql/src/test/results/clientpositive/llap/acid_vectorization_original.q.out
@@ -348,11 +348,11 @@ NULL NULL
 509 1
 510 2
 511 3
-PREHOOK: query: insert into over10k_orc_bucketed select * from over10k cluster by si
+PREHOOK: query: insert into over10k_orc_bucketed select * from over10k
 PREHOOK: type: QUERY
 PREHOOK: Input: default@over10k
 PREHOOK: Output: default@over10k_orc_bucketed
-POSTHOOK: query: insert into over10k_orc_bucketed select * from over10k cluster by si
+POSTHOOK: query: insert into over10k_orc_bucketed select * from over10k
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@over10k
 POSTHOOK: Output: default@over10k_orc_bucketed
@@ -369,11 +369,11 @@ POSTHOOK: Lineage: over10k_orc_bucketed.t SIMPLE [(over10k)over10k.FieldSchema(n
 POSTHOOK: Lineage: over10k_orc_bucketed.ts SIMPLE [(over10k)over10k.FieldSchema(name:ts, type:timestamp, comment:null), ]
 Found 4 items
 #### A masked pattern was here ####
-PREHOOK: query: insert into over10k_orc_bucketed select * from over10k cluster by si
+PREHOOK: query: insert into over10k_orc_bucketed select * from over10k
 PREHOOK: type: QUERY
 PREHOOK: Input: default@over10k
 PREHOOK: Output: default@over10k_orc_bucketed
-POSTHOOK: query: insert into over10k_orc_bucketed select * from over10k cluster by si
+POSTHOOK: query: insert into over10k_orc_bucketed select * from over10k
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@over10k
 POSTHOOK: Output: default@over10k_orc_bucketed
@@ -541,12 +541,12 @@ POSTHOOK: query: select ROW__ID, t, si, i from over10k_orc_bucketed where b = 42
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@over10k_orc_bucketed
 #### A masked pattern was here ####
-{"transactionid":0,"bucketid":536870912,"rowid":104} -3 344 65733
-{"transactionid":0,"bucketid":536870912,"rowid":368} -3 344 65733
-{"transactionid":0,"bucketid":536936448,"rowid":250} 5 501 65585
-{"transactionid":0,"bucketid":536936448,"rowid":512} 5 501 65585
-{"transactionid":0,"bucketid":537067520,"rowid":224} 35 463 65646
-{"transactionid":0,"bucketid":537067520,"rowid":501} 35 463 65646
+{"transactionid":0,"bucketid":536870912,"rowid":36} -3 344 65733
+{"transactionid":0,"bucketid":536870912,"rowid":300} -3 344 65733
+{"transactionid":0,"bucketid":536936448,"rowid":82} 5 501 65585
+{"transactionid":0,"bucketid":536936448,"rowid":344} 5 501 65585
+{"transactionid":0,"bucketid":537067520,"rowid":211} 35 463 65646
+{"transactionid":0,"bucketid":537067520,"rowid":488} 35 463 65646
 PREHOOK: query: explain update over10k_orc_bucketed set i = 0 where b = 4294967363 and t < 100
 PREHOOK: type: QUERY
 POSTHOOK: query: explain update over10k_orc_bucketed set i = 0 where b = 4294967363 and t < 100
diff --git ql/src/test/results/clientpositive/tez/acid_vectorization_original_tez.q.out ql/src/test/results/clientpositive/tez/acid_vectorization_original_tez.q.out
index f10af8c907..3314d25b9f 100644
--- ql/src/test/results/clientpositive/tez/acid_vectorization_original_tez.q.out
+++ ql/src/test/results/clientpositive/tez/acid_vectorization_original_tez.q.out
@@ -348,11 +348,11 @@ NULL NULL
 509 1
 510 2
 511 3
-PREHOOK: query: insert into over10k_orc_bucketed select * from over10k cluster by si
+PREHOOK: query: insert into over10k_orc_bucketed select * from over10k
 PREHOOK: type: QUERY
 PREHOOK: Input: default@over10k
 PREHOOK: Output: default@over10k_orc_bucketed
-POSTHOOK: query: insert into over10k_orc_bucketed select * from over10k cluster by si
+POSTHOOK: query: insert into over10k_orc_bucketed select * from over10k
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@over10k
 POSTHOOK: Output: default@over10k_orc_bucketed
@@ -369,11 +369,11 @@ POSTHOOK: Lineage: over10k_orc_bucketed.t SIMPLE [(over10k)over10k.FieldSchema(n
 POSTHOOK: Lineage: over10k_orc_bucketed.ts SIMPLE [(over10k)over10k.FieldSchema(name:ts, type:timestamp, comment:null), ]
 Found 4 items
 #### A masked pattern was here ####
-PREHOOK: query: insert into over10k_orc_bucketed select * from over10k cluster by si
+PREHOOK: query: insert into over10k_orc_bucketed select * from over10k
 PREHOOK: type: QUERY
 PREHOOK: Input: default@over10k
 PREHOOK: Output: default@over10k_orc_bucketed
-POSTHOOK: query: insert into over10k_orc_bucketed select * from over10k cluster by si
+POSTHOOK: query: insert into over10k_orc_bucketed select * from over10k
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@over10k
 POSTHOOK: Output: default@over10k_orc_bucketed
@@ -539,12 +539,12 @@ POSTHOOK: query: select ROW__ID, t, si, i from over10k_orc_bucketed where b = 42
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@over10k_orc_bucketed
 #### A masked pattern was here ####
-{"transactionid":0,"bucketid":536870912,"rowid":104} -3 344 65733
-{"transactionid":0,"bucketid":536870912,"rowid":368} -3 344 65733
-{"transactionid":0,"bucketid":536936448,"rowid":250} 5 501 65585
-{"transactionid":0,"bucketid":536936448,"rowid":512} 5 501 65585
-{"transactionid":0,"bucketid":537067520,"rowid":224} 35 463 65646
-{"transactionid":0,"bucketid":537067520,"rowid":501} 35 463 65646
+{"transactionid":0,"bucketid":536870912,"rowid":36} -3 344 65733
+{"transactionid":0,"bucketid":536870912,"rowid":300} -3 344 65733
+{"transactionid":0,"bucketid":536936448,"rowid":82} 5 501 65585
+{"transactionid":0,"bucketid":536936448,"rowid":344} 5 501 65585
+{"transactionid":0,"bucketid":537067520,"rowid":211} 35 463 65646
+{"transactionid":0,"bucketid":537067520,"rowid":488} 35 463 65646
 PREHOOK: query: explain update over10k_orc_bucketed set i = 0 where b = 4294967363 and t < 100
 PREHOOK: type: QUERY
 POSTHOOK: query: explain update over10k_orc_bucketed set i = 0 where b = 4294967363 and t < 100
@@ -710,9 +710,9 @@ POSTHOOK: query: select ROW__ID, count(*) from over10k_orc_bucketed group by ROW
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@over10k_orc_bucketed
 #### A masked pattern was here ####
-PREHOOK: query: alter table over10k_orc_bucketed compact 'major' WITH OVERWRITE TBLPROPERTIES ("compactor.mapreduce.map.memory.mb"="500","compactor.hive.tez.container.size"="500")
+PREHOOK: query: alter table over10k_orc_bucketed compact 'major' WITH OVERWRITE TBLPROPERTIES ('compactor.mapreduce.map.memory.mb'='500', 'compactor.mapreduce.reduce.memory.mb'='500','compactor.mapreduce.map.memory.mb'='500', 'compactor.hive.tez.container.size'='500')
 PREHOOK: type: ALTERTABLE_COMPACT
-POSTHOOK: query: alter table over10k_orc_bucketed compact 'major' WITH OVERWRITE TBLPROPERTIES ("compactor.mapreduce.map.memory.mb"="500","compactor.hive.tez.container.size"="500")
+POSTHOOK: query: alter table over10k_orc_bucketed compact 'major' WITH OVERWRITE TBLPROPERTIES ('compactor.mapreduce.map.memory.mb'='500', 'compactor.mapreduce.reduce.memory.mb'='500','compactor.mapreduce.map.memory.mb'='500', 'compactor.hive.tez.container.size'='500')
 POSTHOOK: type: ALTERTABLE_COMPACT
 PREHOOK: query: select ROW__ID, t, si, i from over10k_orc_bucketed where b = 4294967363 and t < 100 order by ROW__ID
 PREHOOK: type: QUERY