commit 7357b930f1d24b66acbc7b4312bcfecbd119d106
Author: Janaki Lahorani
Date:   Fri Jan 19 09:55:07 2018 -0800

    HIVE-18499: Amend point lookup tests to check data

    Change-Id: Iee703e9f479f26b0b2fd0ee60c5bd26c15b6d98a

diff --git ql/src/test/queries/clientpositive/pointlookup.q ql/src/test/queries/clientpositive/pointlookup.q
index c460f39feddbba7b5334b1b093b1fed06bd3123b..951f77e6d1589168417a93f20dde202ef74edfe1 100644
--- ql/src/test/queries/clientpositive/pointlookup.q
+++ ql/src/test/queries/clientpositive/pointlookup.q
@@ -2,20 +2,35 @@ explain
 SELECT key
 FROM src
 WHERE
- ((key = '0'
- AND value = '8') OR (key = '1'
- AND value = '5') OR (key = '2'
- AND value = '6') OR (key = '3'
- AND value = '8') OR (key = '4'
- AND value = '1') OR (key = '5'
- AND value = '6') OR (key = '6'
- AND value = '1') OR (key = '7'
- AND value = '1') OR (key = '8'
- AND value = '1') OR (key = '9'
- AND value = '1') OR (key = '10'
- AND value = '3'))
+ ((key = '0' AND value = 'val_0') OR
+ (key = '1' AND value = 'val_1') OR
+ (key = '2' AND value = 'val_2') OR
+ (key = '3' AND value = 'val_3') OR
+ (key = '4' AND value = 'val_4') OR
+ (key = '5' AND value = 'val_5') OR
+ (key = '6' AND value = 'val_6') OR
+ (key = '7' AND value = 'val_7') OR
+ (key = '8' AND value = 'val_8') OR
+ (key = '9' AND value = 'val_9') OR
+ (key = '10' AND value = 'val_10'))
 ;
+create table orOutput as
+SELECT key
+FROM src
+WHERE
+ ((key = '0' AND value = 'val_0') OR
+ (key = '1' AND value = 'val_1') OR
+ (key = '2' AND value = 'val_2') OR
+ (key = '3' AND value = 'val_3') OR
+ (key = '4' AND value = 'val_4') OR
+ (key = '5' AND value = 'val_5') OR
+ (key = '6' AND value = 'val_6') OR
+ (key = '7' AND value = 'val_7') OR
+ (key = '8' AND value = 'val_8') OR
+ (key = '9' AND value = 'val_9') OR
+ (key = '10' AND value = 'val_10'))
+;

 set hive.optimize.point.lookup.min=3;
 set hive.optimize.partition.columns.separate=false;

@@ -23,18 +38,34 @@ explain
 SELECT key
 FROM src
 WHERE
- ((key = '0'
- AND value = '8') OR (key = '1'
- AND value = '5') OR (key = '2'
- AND value = '6') OR (key = '3'
- AND value = '8') OR (key = '4'
- AND value = '1') OR (key = '5'
- AND value = '6') OR (key = '6'
- AND value = '1') OR (key = '7'
- AND value = '1') OR (key = '8'
- AND value = '1') OR (key = '9'
- AND value = '1') OR (key = '10'
- AND value = '3'))
+ ((key = '0' AND value = 'val_0') OR
+ (key = '1' AND value = 'val_1') OR
+ (key = '2' AND value = 'val_2') OR
+ (key = '3' AND value = 'val_3') OR
+ (key = '4' AND value = 'val_4') OR
+ (key = '5' AND value = 'val_5') OR
+ (key = '6' AND value = 'val_6') OR
+ (key = '7' AND value = 'val_7') OR
+ (key = '8' AND value = 'val_8') OR
+ (key = '9' AND value = 'val_9') OR
+ (key = '10' AND value = 'val_10'))
+;
+
+create table inOutput as
+SELECT key
+FROM src
+WHERE
+ ((key = '0' AND value = 'val_0') OR
+ (key = '1' AND value = 'val_1') OR
+ (key = '2' AND value = 'val_2') OR
+ (key = '3' AND value = 'val_3') OR
+ (key = '4' AND value = 'val_4') OR
+ (key = '5' AND value = 'val_5') OR
+ (key = '6' AND value = 'val_6') OR
+ (key = '7' AND value = 'val_7') OR
+ (key = '8' AND value = 'val_8') OR
+ (key = '9' AND value = 'val_9') OR
+ (key = '10' AND value = 'val_10'))
 ;

 set hive.optimize.partition.columns.separate=true;

@@ -42,16 +73,53 @@ explain
 SELECT key
 FROM src
 WHERE
- ((key = '0'
- AND value = '8') OR (key = '1'
- AND value = '5') OR (key = '2'
- AND value = '6') OR (key = '3'
- AND value = '8') OR (key = '4'
- AND value = '1') OR (key = '5'
- AND value = '6') OR (key = '6'
- AND value = '1') OR (key = '7'
- AND value = '1') OR (key = '8'
- AND value = '1') OR (key = '9'
- AND value = '1') OR (key = '10'
- AND value = '3'))
+ ((key = '0' AND value = 'val_0') OR
+ (key = '1' AND value = 'val_1') OR
+ (key = '2' AND value = 'val_2') OR
+ (key = '3' AND value = 'val_3') OR
+ (key = '4' AND value = 'val_4') OR
+ (key = '5' AND value = 'val_5') OR
+ (key = '6' AND value = 'val_6') OR
+ (key = '7' AND value = 'val_7') OR
+ (key = '8' AND value = 'val_8') OR
+ (key = '9' AND value = 'val_9') OR
+ (key = '10' AND value = 'val_10'))
 ;
+
+create table inOutputOpt as
+SELECT key
+FROM src
+WHERE
+ ((key = '0' AND value = 'val_0') OR
+ (key = '1' AND value = 'val_1') OR
+ (key = '2' AND value = 'val_2') OR
+ (key = '3' AND value = 'val_3') OR
+ (key = '4' AND value = 'val_4') OR
+ (key = '5' AND value = 'val_5') OR
+ (key = '6' AND value = 'val_6') OR
+ (key = '7' AND value = 'val_7') OR
+ (key = '8' AND value = 'val_8') OR
+ (key = '9' AND value = 'val_9') OR
+ (key = '10' AND value = 'val_10'))
+;
+
+-- Output from all these tables should be the same
+select count(*) from orOutput;
+select count(*) from inOutput;
+select count(*) from inOutputOpt;
+
+-- check that orOutput and inOutput match using a full outer join
+select orOutput.key, inOutput.key
+from orOutput full outer join inOutput on (orOutput.key = inOutput.key)
+where orOutput.key is null
+or inOutput.key is null;
+
+-- check that orOutput and inOutputOpt match using a full outer join
+select orOutput.key, inOutputOpt.key
+from orOutput full outer join inOutputOpt on (orOutput.key = inOutputOpt.key)
+where orOutput.key is null
+or inOutputOpt.key is null;
+
+drop table orOutput;
+drop table inOutput;
+drop table inOutputOpt;
\ No newline at end of file
diff --git ql/src/test/queries/clientpositive/pointlookup2.q ql/src/test/queries/clientpositive/pointlookup2.q
index 58912286c42205d17b7b0643abb1c760a1002129..6e1cb078d5884379b059acb10e3f844192a7272c 100644
--- ql/src/test/queries/clientpositive/pointlookup2.q
+++ ql/src/test/queries/clientpositive/pointlookup2.q
@@ -14,6 +14,61 @@ insert overwrite table pcr_t2 select ds, key, value where ds='2000-04-08';
 from pcr_t1
 insert overwrite table pcr_t2 select ds, key, value where ds='2000-04-08' and key=2;

+explain extended
+select key, value, ds
+from pcr_t1
+where (ds='2000-04-08' and key=1) or (ds='2000-04-09' and key=2)
+order by key, value, ds;
+
+explain extended
+select *
+from pcr_t1 t1 join pcr_t1 t2
+on t1.key=t2.key and t1.ds='2000-04-08' and t2.ds='2000-04-08'
+order by t1.key;
+
+explain extended
+select *
+from pcr_t1 t1 join pcr_t1 t2
+on t1.key=t2.key and t1.ds='2000-04-08' and t2.ds='2000-04-09'
+order by t1.key;
+
+explain extended
+select *
+from pcr_t1 t1 join pcr_t2 t2
+where (t1.ds='2000-04-08' and t2.key=1) or (t1.ds='2000-04-09' and t2.key=2)
+order by t2.key, t2.value, t1.ds;
+
+explain extended
+select *
+from pcr_t1 t1 join pcr_t2 t2
+where (t2.ds='2000-04-08' and t1.key=1) or (t2.ds='2000-04-09' and t1.key=2)
+order by t1.key, t1.value, t2.ds;
+
+select key, value, ds
+from pcr_t1
+where (ds='2000-04-08' and key=1) or (ds='2000-04-09' and key=2)
+order by key, value, ds;
+
+select *
+from pcr_t1 t1 join pcr_t1 t2
+on t1.key=t2.key and t1.ds='2000-04-08' and t2.ds='2000-04-08'
+order by t1.key;
+
+select *
+from pcr_t1 t1 join pcr_t2 t2
+where (t1.ds='2000-04-08' and t2.key=1) or (t1.ds='2000-04-09' and t2.key=2)
+order by t2.key, t2.value, t1.ds;
+
+select *
+from pcr_t1 t1 join pcr_t2 t2
+where (t1.ds='2000-04-08' and t2.key=1) or (t1.ds='2000-04-09' and t2.key=2)
+order by
t2.key, t2.value, t1.ds; + +select * +from pcr_t1 t1 join pcr_t2 t2 +where (t2.ds='2000-04-08' and t1.key=1) or (t2.ds='2000-04-09' and t1.key=2) +order by t1.key, t1.value, t2.ds; + set hive.optimize.point.lookup.min=2; set hive.optimize.partition.columns.separate=true; @@ -47,6 +102,31 @@ from pcr_t1 t1 join pcr_t2 t2 where (t2.ds='2000-04-08' and t1.key=1) or (t2.ds='2000-04-09' and t1.key=2) order by t1.key, t1.value, t2.ds; +select key, value, ds +from pcr_t1 +where (ds='2000-04-08' and key=1) or (ds='2000-04-09' and key=2) +order by key, value, ds; + +select * +from pcr_t1 t1 join pcr_t1 t2 +on t1.key=t2.key and t1.ds='2000-04-08' and t2.ds='2000-04-08' +order by t1.key; + +select * +from pcr_t1 t1 join pcr_t2 t2 +where (t1.ds='2000-04-08' and t2.key=1) or (t1.ds='2000-04-09' and t2.key=2) +order by t2.key, t2.value, t1.ds; + +select * +from pcr_t1 t1 join pcr_t2 t2 +where (t1.ds='2000-04-08' and t2.key=1) or (t1.ds='2000-04-09' and t2.key=2) +order by t2.key, t2.value, t1.ds; + +select * +from pcr_t1 t1 join pcr_t2 t2 +where (t2.ds='2000-04-08' and t1.key=1) or (t2.ds='2000-04-09' and t1.key=2) +order by t1.key, t1.value, t2.ds; + drop table pcr_t1; drop table pcr_t2; drop table pcr_t3; \ No newline at end of file diff --git ql/src/test/queries/clientpositive/pointlookup3.q ql/src/test/queries/clientpositive/pointlookup3.q index e24bae1f0bf39db8c6067d029a32b30b7189a6aa..bb934d02b140d778820acf3137da0c966ee6ccfc 100644 --- ql/src/test/queries/clientpositive/pointlookup3.q +++ ql/src/test/queries/clientpositive/pointlookup3.q @@ -6,6 +6,61 @@ insert overwrite table pcr_t1 partition (ds1='2000-04-08', ds2='2001-04-08') sel insert overwrite table pcr_t1 partition (ds1='2000-04-09', ds2='2001-04-09') select * from src where key < 20 order by key; insert overwrite table pcr_t1 partition (ds1='2000-04-10', ds2='2001-04-10') select * from src where key < 20 order by key; +explain extended +select key, value, ds1, ds2 +from pcr_t1 +where (ds1='2000-04-08' and key=1) or (ds1='2000-04-09' and key=2) +order by key, value, ds1, ds2; + +explain extended +select key, value, ds1, ds2 +from pcr_t1 +where (ds1='2000-04-08' and ds2='2001-04-08' and key=1) or (ds1='2000-04-09' and ds2='2001-04-08' and key=2) +order by key, value, ds1, ds2; + +explain extended +select * +from pcr_t1 t1 join pcr_t1 t2 +on t1.key=t2.key and t1.ds1='2000-04-08' and t2.ds2='2001-04-08' +order by t2.key, t2.value, t1.ds1; + +explain extended +select * +from pcr_t1 t1 join pcr_t1 t2 +on t1.key=t2.key and t1.ds1='2000-04-08' and t2.ds1='2000-04-09' +order by t2.key, t2.value, t1.ds1; + +explain extended +select * +from pcr_t1 t1 join pcr_t1 t2 +where (t1.ds1='2000-04-08' and t2.key=1) or (t1.ds1='2000-04-09' and t2.key=2) +order by t2.key, t2.value, t1.ds1; + +select key, value, ds1, ds2 +from pcr_t1 +where (ds1='2000-04-08' and key=1) or (ds1='2000-04-09' and key=2) +order by key, value, ds1, ds2; + +select key, value, ds1, ds2 +from pcr_t1 +where (ds1='2000-04-08' and ds2='2001-04-08' and key=1) or (ds1='2000-04-09' and ds2='2001-04-08' and key=2) +order by key, value, ds1, ds2; + +select * +from pcr_t1 t1 join pcr_t1 t2 +on t1.key=t2.key and t1.ds1='2000-04-08' and t2.ds2='2001-04-08' +order by t2.key, t2.value, t1.ds1; + +select * +from pcr_t1 t1 join pcr_t1 t2 +on t1.key=t2.key and t1.ds1='2000-04-08' and t2.ds1='2000-04-09' +order by t2.key, t2.value, t1.ds1; + +select * +from pcr_t1 t1 join pcr_t1 t2 +where (t1.ds1='2000-04-08' and t2.key=1) or (t1.ds1='2000-04-09' and t2.key=2) +order by t2.key, t2.value, t1.ds1; + set 
hive.optimize.point.lookup.min=2; set hive.optimize.partition.columns.separate=true; @@ -25,13 +80,13 @@ explain extended select * from pcr_t1 t1 join pcr_t1 t2 on t1.key=t2.key and t1.ds1='2000-04-08' and t2.ds2='2001-04-08' -order by t1.key; +order by t2.key, t2.value, t1.ds1; explain extended select * from pcr_t1 t1 join pcr_t1 t2 on t1.key=t2.key and t1.ds1='2000-04-08' and t2.ds1='2000-04-09' -order by t1.key; +order by t2.key, t2.value, t1.ds1; explain extended select * @@ -39,4 +94,29 @@ from pcr_t1 t1 join pcr_t1 t2 where (t1.ds1='2000-04-08' and t2.key=1) or (t1.ds1='2000-04-09' and t2.key=2) order by t2.key, t2.value, t1.ds1; +select key, value, ds1, ds2 +from pcr_t1 +where (ds1='2000-04-08' and key=1) or (ds1='2000-04-09' and key=2) +order by key, value, ds1, ds2; + +select key, value, ds1, ds2 +from pcr_t1 +where (ds1='2000-04-08' and ds2='2001-04-08' and key=1) or (ds1='2000-04-09' and ds2='2001-04-08' and key=2) +order by key, value, ds1, ds2; + +select * +from pcr_t1 t1 join pcr_t1 t2 +on t1.key=t2.key and t1.ds1='2000-04-08' and t2.ds2='2001-04-08' +order by t2.key, t2.value, t1.ds1; + +select * +from pcr_t1 t1 join pcr_t1 t2 +on t1.key=t2.key and t1.ds1='2000-04-08' and t2.ds1='2000-04-09' +order by t2.key, t2.value, t1.ds1; + +select * +from pcr_t1 t1 join pcr_t1 t2 +where (t1.ds1='2000-04-08' and t2.key=1) or (t1.ds1='2000-04-09' and t2.key=2) +order by t2.key, t2.value, t1.ds1; + drop table pcr_t1; diff --git ql/src/test/queries/clientpositive/pointlookup4.q ql/src/test/queries/clientpositive/pointlookup4.q index 58325730604620cc1dc1fca692938030d6ed64b0..ba253010922431c61af9fc189a1f1a3199bda713 100644 --- ql/src/test/queries/clientpositive/pointlookup4.q +++ ql/src/test/queries/clientpositive/pointlookup4.q @@ -15,6 +15,11 @@ from pcr_t1 where (ds1='2000-04-08' and ds2='2001-04-08' and key=1) or (ds1='2000-04-09' and ds2='2001-04-09' and key=2) order by key, value, ds1, ds2; +select key, value, ds1, ds2 +from pcr_t1 +where (ds1='2000-04-08' and ds2='2001-04-08' and key=1) or (ds1='2000-04-09' and ds2='2001-04-09' and key=2) +order by key, value, ds1, ds2; + set hive.optimize.point.lookup=true; set hive.optimize.point.lookup.min=0; set hive.optimize.partition.columns.separate=true; @@ -25,4 +30,9 @@ from pcr_t1 where (ds1='2000-04-08' and ds2='2001-04-08' and key=1) or (ds1='2000-04-09' and ds2='2001-04-09' and key=2) order by key, value, ds1, ds2; +select key, value, ds1, ds2 +from pcr_t1 +where (ds1='2000-04-08' and ds2='2001-04-08' and key=1) or (ds1='2000-04-09' and ds2='2001-04-09' and key=2) +order by key, value, ds1, ds2; + drop table pcr_t1; diff --git ql/src/test/results/clientpositive/pointlookup.q.out ql/src/test/results/clientpositive/pointlookup.q.out index 4b58eb70c687c099f1bd91c8f3fbc24ff10b4ec6..3e3f0019e3db825a388f68b51d0fef1e37f4903b 100644 --- ql/src/test/results/clientpositive/pointlookup.q.out +++ ql/src/test/results/clientpositive/pointlookup.q.out @@ -2,35 +2,33 @@ PREHOOK: query: explain SELECT key FROM src WHERE - ((key = '0' - AND value = '8') OR (key = '1' - AND value = '5') OR (key = '2' - AND value = '6') OR (key = '3' - AND value = '8') OR (key = '4' - AND value = '1') OR (key = '5' - AND value = '6') OR (key = '6' - AND value = '1') OR (key = '7' - AND value = '1') OR (key = '8' - AND value = '1') OR (key = '9' - AND value = '1') OR (key = '10' - AND value = '3')) + ((key = '0' AND value = 'val_0') OR + (key = '1' AND value = 'val_1') OR + (key = '2' AND value = 'val_2') OR + (key = '3' AND value = 'val_3') OR + (key = '4' AND value = 
'val_4') OR + (key = '5' AND value = 'val_5') OR + (key = '6' AND value = 'val_6') OR + (key = '7' AND value = 'val_7') OR + (key = '8' AND value = 'val_8') OR + (key = '9' AND value = 'val_9') OR + (key = '10' AND value = 'val_10')) PREHOOK: type: QUERY POSTHOOK: query: explain SELECT key FROM src WHERE - ((key = '0' - AND value = '8') OR (key = '1' - AND value = '5') OR (key = '2' - AND value = '6') OR (key = '3' - AND value = '8') OR (key = '4' - AND value = '1') OR (key = '5' - AND value = '6') OR (key = '6' - AND value = '1') OR (key = '7' - AND value = '1') OR (key = '8' - AND value = '1') OR (key = '9' - AND value = '1') OR (key = '10' - AND value = '3')) + ((key = '0' AND value = 'val_0') OR + (key = '1' AND value = 'val_1') OR + (key = '2' AND value = 'val_2') OR + (key = '3' AND value = 'val_3') OR + (key = '4' AND value = 'val_4') OR + (key = '5' AND value = 'val_5') OR + (key = '6' AND value = 'val_6') OR + (key = '7' AND value = 'val_7') OR + (key = '8' AND value = 'val_8') OR + (key = '9' AND value = 'val_9') OR + (key = '10' AND value = 'val_10')) POSTHOOK: type: QUERY STAGE DEPENDENCIES: Stage-1 is a root stage @@ -44,7 +42,7 @@ STAGE PLANS: alias: src Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE Filter Operator - predicate: (((key = '0') and (value = '8')) or ((key = '1') and (value = '5')) or ((key = '10') and (value = '3')) or ((key = '2') and (value = '6')) or ((key = '3') and (value = '8')) or ((key = '4') and (value = '1')) or ((key = '5') and (value = '6')) or ((key = '6') and (value = '1')) or ((key = '7') and (value = '1')) or ((key = '8') and (value = '1')) or ((key = '9') and (value = '1'))) (type: boolean) + predicate: (((key = '0') and (value = 'val_0')) or ((key = '1') and (value = 'val_1')) or ((key = '10') and (value = 'val_10')) or ((key = '2') and (value = 'val_2')) or ((key = '3') and (value = 'val_3')) or ((key = '4') and (value = 'val_4')) or ((key = '5') and (value = 'val_5')) or ((key = '6') and (value = 'val_6')) or ((key = '7') and (value = 'val_7')) or ((key = '8') and (value = 'val_8')) or ((key = '9') and (value = 'val_9'))) (type: boolean) Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) @@ -64,39 +62,76 @@ STAGE PLANS: Processor Tree: ListSink +PREHOOK: query: create table orOutput as +SELECT key +FROM src +WHERE + ((key = '0' AND value = 'val_0') OR + (key = '1' AND value = 'val_1') OR + (key = '2' AND value = 'val_2') OR + (key = '3' AND value = 'val_3') OR + (key = '4' AND value = 'val_4') OR + (key = '5' AND value = 'val_5') OR + (key = '6' AND value = 'val_6') OR + (key = '7' AND value = 'val_7') OR + (key = '8' AND value = 'val_8') OR + (key = '9' AND value = 'val_9') OR + (key = '10' AND value = 'val_10')) +PREHOOK: type: CREATETABLE_AS_SELECT +PREHOOK: Input: default@src +PREHOOK: Output: database:default +PREHOOK: Output: default@orOutput +POSTHOOK: query: create table orOutput as +SELECT key +FROM src +WHERE + ((key = '0' AND value = 'val_0') OR + (key = '1' AND value = 'val_1') OR + (key = '2' AND value = 'val_2') OR + (key = '3' AND value = 'val_3') OR + (key = '4' AND value = 'val_4') OR + (key = '5' AND value = 'val_5') OR + (key = '6' AND value = 'val_6') OR + (key = '7' AND value = 'val_7') OR + (key = '8' AND value = 'val_8') OR + (key = '9' AND value = 'val_9') OR + (key = '10' AND value = 'val_10')) +POSTHOOK: type: CREATETABLE_AS_SELECT +POSTHOOK: Input: default@src +POSTHOOK: Output: database:default 
+POSTHOOK: Output: default@orOutput +POSTHOOK: Lineage: oroutput.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] PREHOOK: query: explain SELECT key FROM src WHERE - ((key = '0' - AND value = '8') OR (key = '1' - AND value = '5') OR (key = '2' - AND value = '6') OR (key = '3' - AND value = '8') OR (key = '4' - AND value = '1') OR (key = '5' - AND value = '6') OR (key = '6' - AND value = '1') OR (key = '7' - AND value = '1') OR (key = '8' - AND value = '1') OR (key = '9' - AND value = '1') OR (key = '10' - AND value = '3')) + ((key = '0' AND value = 'val_0') OR + (key = '1' AND value = 'val_1') OR + (key = '2' AND value = 'val_2') OR + (key = '3' AND value = 'val_3') OR + (key = '4' AND value = 'val_4') OR + (key = '5' AND value = 'val_5') OR + (key = '6' AND value = 'val_6') OR + (key = '7' AND value = 'val_7') OR + (key = '8' AND value = 'val_8') OR + (key = '9' AND value = 'val_9') OR + (key = '10' AND value = 'val_10')) PREHOOK: type: QUERY POSTHOOK: query: explain SELECT key FROM src WHERE - ((key = '0' - AND value = '8') OR (key = '1' - AND value = '5') OR (key = '2' - AND value = '6') OR (key = '3' - AND value = '8') OR (key = '4' - AND value = '1') OR (key = '5' - AND value = '6') OR (key = '6' - AND value = '1') OR (key = '7' - AND value = '1') OR (key = '8' - AND value = '1') OR (key = '9' - AND value = '1') OR (key = '10' - AND value = '3')) + ((key = '0' AND value = 'val_0') OR + (key = '1' AND value = 'val_1') OR + (key = '2' AND value = 'val_2') OR + (key = '3' AND value = 'val_3') OR + (key = '4' AND value = 'val_4') OR + (key = '5' AND value = 'val_5') OR + (key = '6' AND value = 'val_6') OR + (key = '7' AND value = 'val_7') OR + (key = '8' AND value = 'val_8') OR + (key = '9' AND value = 'val_9') OR + (key = '10' AND value = 'val_10')) POSTHOOK: type: QUERY STAGE DEPENDENCIES: Stage-1 is a root stage @@ -110,7 +145,7 @@ STAGE PLANS: alias: src Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE Filter Operator - predicate: (struct(key,value)) IN (const struct('0','8'), const struct('1','5'), const struct('2','6'), const struct('3','8'), const struct('4','1'), const struct('5','6'), const struct('6','1'), const struct('7','1'), const struct('8','1'), const struct('9','1'), const struct('10','3')) (type: boolean) + predicate: (struct(key,value)) IN (const struct('0','val_0'), const struct('1','val_1'), const struct('2','val_2'), const struct('3','val_3'), const struct('4','val_4'), const struct('5','val_5'), const struct('6','val_6'), const struct('7','val_7'), const struct('8','val_8'), const struct('9','val_9'), const struct('10','val_10')) (type: boolean) Statistics: Num rows: 125 Data size: 1328 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) @@ -130,39 +165,76 @@ STAGE PLANS: Processor Tree: ListSink +PREHOOK: query: create table inOutput as +SELECT key +FROM src +WHERE + ((key = '0' AND value = 'val_0') OR + (key = '1' AND value = 'val_1') OR + (key = '2' AND value = 'val_2') OR + (key = '3' AND value = 'val_3') OR + (key = '4' AND value = 'val_4') OR + (key = '5' AND value = 'val_5') OR + (key = '6' AND value = 'val_6') OR + (key = '7' AND value = 'val_7') OR + (key = '8' AND value = 'val_8') OR + (key = '9' AND value = 'val_9') OR + (key = '10' AND value = 'val_10')) +PREHOOK: type: CREATETABLE_AS_SELECT +PREHOOK: Input: default@src +PREHOOK: Output: database:default +PREHOOK: Output: default@inOutput +POSTHOOK: query: create table inOutput as +SELECT key +FROM src 
+WHERE + ((key = '0' AND value = 'val_0') OR + (key = '1' AND value = 'val_1') OR + (key = '2' AND value = 'val_2') OR + (key = '3' AND value = 'val_3') OR + (key = '4' AND value = 'val_4') OR + (key = '5' AND value = 'val_5') OR + (key = '6' AND value = 'val_6') OR + (key = '7' AND value = 'val_7') OR + (key = '8' AND value = 'val_8') OR + (key = '9' AND value = 'val_9') OR + (key = '10' AND value = 'val_10')) +POSTHOOK: type: CREATETABLE_AS_SELECT +POSTHOOK: Input: default@src +POSTHOOK: Output: database:default +POSTHOOK: Output: default@inOutput +POSTHOOK: Lineage: inoutput.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] PREHOOK: query: explain SELECT key FROM src WHERE - ((key = '0' - AND value = '8') OR (key = '1' - AND value = '5') OR (key = '2' - AND value = '6') OR (key = '3' - AND value = '8') OR (key = '4' - AND value = '1') OR (key = '5' - AND value = '6') OR (key = '6' - AND value = '1') OR (key = '7' - AND value = '1') OR (key = '8' - AND value = '1') OR (key = '9' - AND value = '1') OR (key = '10' - AND value = '3')) + ((key = '0' AND value = 'val_0') OR + (key = '1' AND value = 'val_1') OR + (key = '2' AND value = 'val_2') OR + (key = '3' AND value = 'val_3') OR + (key = '4' AND value = 'val_4') OR + (key = '5' AND value = 'val_5') OR + (key = '6' AND value = 'val_6') OR + (key = '7' AND value = 'val_7') OR + (key = '8' AND value = 'val_8') OR + (key = '9' AND value = 'val_9') OR + (key = '10' AND value = 'val_10')) PREHOOK: type: QUERY POSTHOOK: query: explain SELECT key FROM src WHERE - ((key = '0' - AND value = '8') OR (key = '1' - AND value = '5') OR (key = '2' - AND value = '6') OR (key = '3' - AND value = '8') OR (key = '4' - AND value = '1') OR (key = '5' - AND value = '6') OR (key = '6' - AND value = '1') OR (key = '7' - AND value = '1') OR (key = '8' - AND value = '1') OR (key = '9' - AND value = '1') OR (key = '10' - AND value = '3')) + ((key = '0' AND value = 'val_0') OR + (key = '1' AND value = 'val_1') OR + (key = '2' AND value = 'val_2') OR + (key = '3' AND value = 'val_3') OR + (key = '4' AND value = 'val_4') OR + (key = '5' AND value = 'val_5') OR + (key = '6' AND value = 'val_6') OR + (key = '7' AND value = 'val_7') OR + (key = '8' AND value = 'val_8') OR + (key = '9' AND value = 'val_9') OR + (key = '10' AND value = 'val_10')) POSTHOOK: type: QUERY STAGE DEPENDENCIES: Stage-1 is a root stage @@ -176,7 +248,7 @@ STAGE PLANS: alias: src Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE Filter Operator - predicate: (struct(key,value)) IN (const struct('0','8'), const struct('1','5'), const struct('2','6'), const struct('3','8'), const struct('4','1'), const struct('5','6'), const struct('6','1'), const struct('7','1'), const struct('8','1'), const struct('9','1'), const struct('10','3')) (type: boolean) + predicate: (struct(key,value)) IN (const struct('0','val_0'), const struct('1','val_1'), const struct('2','val_2'), const struct('3','val_3'), const struct('4','val_4'), const struct('5','val_5'), const struct('6','val_6'), const struct('7','val_7'), const struct('8','val_8'), const struct('9','val_9'), const struct('10','val_10')) (type: boolean) Statistics: Num rows: 125 Data size: 1328 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) @@ -196,3 +268,125 @@ STAGE PLANS: Processor Tree: ListSink +PREHOOK: query: create table inOutputOpt as +SELECT key +FROM src +WHERE + ((key = '0' AND value = 'val_0') OR + (key = '1' AND value = 'val_1') OR + (key = '2' AND 
value = 'val_2') OR
+ (key = '3' AND value = 'val_3') OR
+ (key = '4' AND value = 'val_4') OR
+ (key = '5' AND value = 'val_5') OR
+ (key = '6' AND value = 'val_6') OR
+ (key = '7' AND value = 'val_7') OR
+ (key = '8' AND value = 'val_8') OR
+ (key = '9' AND value = 'val_9') OR
+ (key = '10' AND value = 'val_10'))
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: default@src
+PREHOOK: Output: database:default
+PREHOOK: Output: default@inOutputOpt
+POSTHOOK: query: create table inOutputOpt as
+SELECT key
+FROM src
+WHERE
+ ((key = '0' AND value = 'val_0') OR
+ (key = '1' AND value = 'val_1') OR
+ (key = '2' AND value = 'val_2') OR
+ (key = '3' AND value = 'val_3') OR
+ (key = '4' AND value = 'val_4') OR
+ (key = '5' AND value = 'val_5') OR
+ (key = '6' AND value = 'val_6') OR
+ (key = '7' AND value = 'val_7') OR
+ (key = '8' AND value = 'val_8') OR
+ (key = '9' AND value = 'val_9') OR
+ (key = '10' AND value = 'val_10'))
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: default@src
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@inOutputOpt
+POSTHOOK: Lineage: inoutputopt.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+PREHOOK: query: select count(*) from orOutput
+PREHOOK: type: QUERY
+PREHOOK: Input: default@oroutput
+#### A masked pattern was here ####
+POSTHOOK: query: select count(*) from orOutput
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@oroutput
+#### A masked pattern was here ####
+11
+PREHOOK: query: select count(*) from inOutput
+PREHOOK: type: QUERY
+PREHOOK: Input: default@inoutput
+#### A masked pattern was here ####
+POSTHOOK: query: select count(*) from inOutput
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@inoutput
+#### A masked pattern was here ####
+11
+PREHOOK: query: select count(*) from inOutputOpt
+PREHOOK: type: QUERY
+PREHOOK: Input: default@inoutputopt
+#### A masked pattern was here ####
+POSTHOOK: query: select count(*) from inOutputOpt
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@inoutputopt
+#### A masked pattern was here ####
+11
+PREHOOK: query: select orOutput.key, inOutput.key
+from orOutput full outer join inOutput on (orOutput.key = inOutput.key)
+where orOutput.key is null
+or inOutput.key is null
+PREHOOK: type: QUERY
+PREHOOK: Input: default@inoutput
+PREHOOK: Input: default@oroutput
+#### A masked pattern was here ####
+POSTHOOK: query: select orOutput.key, inOutput.key
+from orOutput full outer join inOutput on (orOutput.key = inOutput.key)
+where orOutput.key is null
+or inOutput.key is null
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@inoutput
+POSTHOOK: Input: default@oroutput
+#### A masked pattern was here ####
+PREHOOK: query: select orOutput.key, inOutputOpt.key
+from orOutput full outer join inOutputOpt on (orOutput.key = inOutputOpt.key)
+where orOutput.key is null
+or inOutputOpt.key is null
+PREHOOK: type: QUERY
+PREHOOK: Input: default@inoutputopt
+PREHOOK: Input: default@oroutput
+#### A masked pattern was here ####
+POSTHOOK: query: select orOutput.key, inOutputOpt.key
+from orOutput full outer join inOutputOpt on (orOutput.key = inOutputOpt.key)
+where orOutput.key is null
+or inOutputOpt.key is null
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@inoutputopt
+POSTHOOK: Input: default@oroutput
+#### A masked pattern was here ####
+PREHOOK: query: drop table orOutput
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@oroutput
+PREHOOK: Output: default@oroutput
+POSTHOOK: query: drop table orOutput
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@oroutput
+POSTHOOK:
Output: default@oroutput +PREHOOK: query: drop table inOutput +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@inoutput +PREHOOK: Output: default@inoutput +POSTHOOK: query: drop table inOutput +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@inoutput +POSTHOOK: Output: default@inoutput +PREHOOK: query: drop table inOutputOpt +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@inoutputopt +PREHOOK: Output: default@inoutputopt +POSTHOOK: query: drop table inOutputOpt +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@inoutputopt +POSTHOOK: Output: default@inoutputopt diff --git ql/src/test/results/clientpositive/pointlookup2.q.out ql/src/test/results/clientpositive/pointlookup2.q.out index c99a3223e471657c46d4c35af07f1ff7b6e5e8d4..7391b08541b52d8e33f6dd43b83e0fdbcc5f5f27 100644 --- ql/src/test/results/clientpositive/pointlookup2.q.out +++ ql/src/test/results/clientpositive/pointlookup2.q.out @@ -102,6 +102,1526 @@ STAGE DEPENDENCIES: Stage-1 is a root stage Stage-0 depends on stages: Stage-1 +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: pcr_t1 + Statistics: Num rows: 40 Data size: 320 Basic stats: COMPLETE Column stats: NONE + GatherStats: false + Filter Operator + isSamplingPred: false + predicate: (((ds = '2000-04-08') and (key = 1)) or ((ds = '2000-04-09') and (key = 2))) (type: boolean) + Statistics: Num rows: 40 Data size: 320 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int), value (type: string), ds (type: string) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 40 Data size: 320 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string) + null sort order: aaa + sort order: +++ + Statistics: Num rows: 40 Data size: 320 Basic stats: COMPLETE Column stats: NONE + tag: -1 + auto parallelism: false + Path -> Alias: +#### A masked pattern was here #### + Path -> Partition: +#### A masked pattern was here #### + Partition + base file name: ds=2000-04-08 + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + partition values: + ds 2000-04-08 + properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} + bucket_count -1 + column.name.delimiter , + columns key,value + columns.comments + columns.types int:string +#### A masked pattern was here #### + name default.pcr_t1 + numFiles 1 + numRows 20 + partition_columns ds + partition_columns.types string + rawDataSize 160 + serialization.ddl struct pcr_t1 { i32 key, string value} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 180 +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + column.name.delimiter , + columns key,value + columns.comments + columns.types int:string +#### A masked pattern was here #### + name default.pcr_t1 + partition_columns ds + partition_columns.types string + serialization.ddl struct pcr_t1 { i32 key, string value} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.pcr_t1 + name: 
default.pcr_t1 +#### A masked pattern was here #### + Partition + base file name: ds=2000-04-09 + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + partition values: + ds 2000-04-09 + properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} + bucket_count -1 + column.name.delimiter , + columns key,value + columns.comments + columns.types int:string +#### A masked pattern was here #### + name default.pcr_t1 + numFiles 1 + numRows 20 + partition_columns ds + partition_columns.types string + rawDataSize 160 + serialization.ddl struct pcr_t1 { i32 key, string value} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 180 +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + column.name.delimiter , + columns key,value + columns.comments + columns.types int:string +#### A masked pattern was here #### + name default.pcr_t1 + partition_columns ds + partition_columns.types string + serialization.ddl struct pcr_t1 { i32 key, string value} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.pcr_t1 + name: default.pcr_t1 + Truncated Path -> Alias: + /pcr_t1/ds=2000-04-08 [pcr_t1] + /pcr_t1/ds=2000-04-09 [pcr_t1] + Needs Tagging: false + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string), KEY.reducesinkkey2 (type: string) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 40 Data size: 320 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + GlobalTableId: 0 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 + Statistics: Num rows: 40 Data size: 320 Basic stats: COMPLETE Column stats: NONE +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + properties: + columns _col0,_col1,_col2 + columns.types int:string:string + escape.delim \ + hive.serialization.extend.additional.nesting.levels true + serialization.escape.crlf true + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + TotalFiles: 1 + GatherStats: false + MultiFileSpray: false + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: explain extended +select * +from pcr_t1 t1 join pcr_t1 t2 +on t1.key=t2.key and t1.ds='2000-04-08' and t2.ds='2000-04-08' +order by t1.key +PREHOOK: type: QUERY +POSTHOOK: query: explain extended +select * +from pcr_t1 t1 join pcr_t1 t2 +on t1.key=t2.key and t1.ds='2000-04-08' and t2.ds='2000-04-08' +order by t1.key +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-2 depends on stages: Stage-1 + Stage-0 depends on stages: Stage-2 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: t1 + Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE + GatherStats: false + Filter Operator + 
isSamplingPred: false + predicate: key is not null (type: boolean) + Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int), value (type: string) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + null sort order: a + sort order: + + Map-reduce partition columns: _col0 (type: int) + Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE + tag: 0 + value expressions: _col1 (type: string) + auto parallelism: false + TableScan + alias: t2 + Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE + GatherStats: false + Filter Operator + isSamplingPred: false + predicate: key is not null (type: boolean) + Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int), value (type: string) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + null sort order: a + sort order: + + Map-reduce partition columns: _col0 (type: int) + Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE + tag: 1 + value expressions: _col1 (type: string) + auto parallelism: false + Path -> Alias: +#### A masked pattern was here #### + Path -> Partition: +#### A masked pattern was here #### + Partition + base file name: ds=2000-04-08 + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + partition values: + ds 2000-04-08 + properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} + bucket_count -1 + column.name.delimiter , + columns key,value + columns.comments + columns.types int:string +#### A masked pattern was here #### + name default.pcr_t1 + numFiles 1 + numRows 20 + partition_columns ds + partition_columns.types string + rawDataSize 160 + serialization.ddl struct pcr_t1 { i32 key, string value} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 180 +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + column.name.delimiter , + columns key,value + columns.comments + columns.types int:string +#### A masked pattern was here #### + name default.pcr_t1 + partition_columns ds + partition_columns.types string + serialization.ddl struct pcr_t1 { i32 key, string value} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.pcr_t1 + name: default.pcr_t1 + Truncated Path -> Alias: + /pcr_t1/ds=2000-04-08 [$hdt$_0:$hdt$_0:t1, $hdt$_0:$hdt$_1:t2] + Needs Tagging: true + Reduce Operator Tree: + Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 _col0 (type: int) + 1 _col0 (type: int) + outputColumnNames: _col0, _col1, _col3, _col4 + Statistics: Num rows: 22 Data size: 176 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: _col0 (type: int), _col1 (type: string), _col3 (type: int), _col4 
(type: string) + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 22 Data size: 176 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + GlobalTableId: 0 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + properties: + column.name.delimiter , + columns _col0,_col1,_col2,_col3 + columns.types int,string,int,string + escape.delim \ + serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + TotalFiles: 1 + GatherStats: false + MultiFileSpray: false + + Stage: Stage-2 + Map Reduce + Map Operator Tree: + TableScan + GatherStats: false + Reduce Output Operator + key expressions: _col0 (type: int) + null sort order: a + sort order: + + Statistics: Num rows: 22 Data size: 176 Basic stats: COMPLETE Column stats: NONE + tag: -1 + value expressions: _col1 (type: string), _col2 (type: int), _col3 (type: string) + auto parallelism: false + Path -> Alias: +#### A masked pattern was here #### + Path -> Partition: +#### A masked pattern was here #### + Partition + base file name: -mr-10004 + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + properties: + column.name.delimiter , + columns _col0,_col1,_col2,_col3 + columns.types int,string,int,string + escape.delim \ + serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + properties: + column.name.delimiter , + columns _col0,_col1,_col2,_col3 + columns.types int,string,int,string + escape.delim \ + serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + Truncated Path -> Alias: +#### A masked pattern was here #### + Needs Tagging: false + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: string), '2000-04-08' (type: string), VALUE._col1 (type: int), VALUE._col2 (type: string), '2000-04-08' (type: string) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + Statistics: Num rows: 22 Data size: 176 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + GlobalTableId: 0 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 + Statistics: Num rows: 22 Data size: 176 Basic stats: COMPLETE Column stats: NONE +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + properties: + columns _col0,_col1,_col2,_col3,_col4,_col5 + columns.types int:string:string:int:string:string + escape.delim \ + hive.serialization.extend.additional.nesting.levels true + serialization.escape.crlf true + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + TotalFiles: 1 + GatherStats: false + MultiFileSpray: false + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: explain extended +select * +from pcr_t1 t1 join 
pcr_t1 t2 +on t1.key=t2.key and t1.ds='2000-04-08' and t2.ds='2000-04-09' +order by t1.key +PREHOOK: type: QUERY +POSTHOOK: query: explain extended +select * +from pcr_t1 t1 join pcr_t1 t2 +on t1.key=t2.key and t1.ds='2000-04-08' and t2.ds='2000-04-09' +order by t1.key +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-2 depends on stages: Stage-1 + Stage-0 depends on stages: Stage-2 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: t1 + Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE + GatherStats: false + Filter Operator + isSamplingPred: false + predicate: key is not null (type: boolean) + Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int), value (type: string) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + null sort order: a + sort order: + + Map-reduce partition columns: _col0 (type: int) + Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE + tag: 0 + value expressions: _col1 (type: string) + auto parallelism: false + TableScan + alias: t2 + Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE + GatherStats: false + Filter Operator + isSamplingPred: false + predicate: key is not null (type: boolean) + Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int), value (type: string) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + null sort order: a + sort order: + + Map-reduce partition columns: _col0 (type: int) + Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE + tag: 1 + value expressions: _col1 (type: string) + auto parallelism: false + Path -> Alias: +#### A masked pattern was here #### + Path -> Partition: +#### A masked pattern was here #### + Partition + base file name: ds=2000-04-08 + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + partition values: + ds 2000-04-08 + properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} + bucket_count -1 + column.name.delimiter , + columns key,value + columns.comments + columns.types int:string +#### A masked pattern was here #### + name default.pcr_t1 + numFiles 1 + numRows 20 + partition_columns ds + partition_columns.types string + rawDataSize 160 + serialization.ddl struct pcr_t1 { i32 key, string value} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 180 +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + column.name.delimiter , + columns key,value + columns.comments + columns.types int:string +#### A masked pattern was here #### + name default.pcr_t1 + partition_columns ds + partition_columns.types string + serialization.ddl struct pcr_t1 { i32 key, string value} + serialization.format 1 + serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.pcr_t1 + name: default.pcr_t1 +#### A masked pattern was here #### + Partition + base file name: ds=2000-04-09 + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + partition values: + ds 2000-04-09 + properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} + bucket_count -1 + column.name.delimiter , + columns key,value + columns.comments + columns.types int:string +#### A masked pattern was here #### + name default.pcr_t1 + numFiles 1 + numRows 20 + partition_columns ds + partition_columns.types string + rawDataSize 160 + serialization.ddl struct pcr_t1 { i32 key, string value} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 180 +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + column.name.delimiter , + columns key,value + columns.comments + columns.types int:string +#### A masked pattern was here #### + name default.pcr_t1 + partition_columns ds + partition_columns.types string + serialization.ddl struct pcr_t1 { i32 key, string value} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.pcr_t1 + name: default.pcr_t1 + Truncated Path -> Alias: + /pcr_t1/ds=2000-04-08 [$hdt$_0:$hdt$_0:t1] + /pcr_t1/ds=2000-04-09 [$hdt$_0:$hdt$_1:t2] + Needs Tagging: true + Reduce Operator Tree: + Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 _col0 (type: int) + 1 _col0 (type: int) + outputColumnNames: _col0, _col1, _col3, _col4 + Statistics: Num rows: 22 Data size: 176 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: _col0 (type: int), _col1 (type: string), _col3 (type: int), _col4 (type: string) + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 22 Data size: 176 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + GlobalTableId: 0 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + properties: + column.name.delimiter , + columns _col0,_col1,_col2,_col3 + columns.types int,string,int,string + escape.delim \ + serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + TotalFiles: 1 + GatherStats: false + MultiFileSpray: false + + Stage: Stage-2 + Map Reduce + Map Operator Tree: + TableScan + GatherStats: false + Reduce Output Operator + key expressions: _col0 (type: int) + null sort order: a + sort order: + + Statistics: Num rows: 22 Data size: 176 Basic stats: COMPLETE Column stats: NONE + tag: -1 + value expressions: _col1 (type: string), _col2 (type: int), _col3 (type: string) + auto parallelism: false + Path -> Alias: +#### A masked pattern was here #### + Path -> Partition: +#### A masked pattern was here #### + Partition + base file name: -mr-10004 + 
input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + properties: + column.name.delimiter , + columns _col0,_col1,_col2,_col3 + columns.types int,string,int,string + escape.delim \ + serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + properties: + column.name.delimiter , + columns _col0,_col1,_col2,_col3 + columns.types int,string,int,string + escape.delim \ + serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + Truncated Path -> Alias: +#### A masked pattern was here #### + Needs Tagging: false + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: string), '2000-04-08' (type: string), VALUE._col1 (type: int), VALUE._col2 (type: string), '2000-04-09' (type: string) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + Statistics: Num rows: 22 Data size: 176 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + GlobalTableId: 0 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 + Statistics: Num rows: 22 Data size: 176 Basic stats: COMPLETE Column stats: NONE +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + properties: + columns _col0,_col1,_col2,_col3,_col4,_col5 + columns.types int:string:string:int:string:string + escape.delim \ + hive.serialization.extend.additional.nesting.levels true + serialization.escape.crlf true + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + TotalFiles: 1 + GatherStats: false + MultiFileSpray: false + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +Warning: Shuffle Join JOIN[8][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product +PREHOOK: query: explain extended +select * +from pcr_t1 t1 join pcr_t2 t2 +where (t1.ds='2000-04-08' and t2.key=1) or (t1.ds='2000-04-09' and t2.key=2) +order by t2.key, t2.value, t1.ds +PREHOOK: type: QUERY +POSTHOOK: query: explain extended +select * +from pcr_t1 t1 join pcr_t2 t2 +where (t1.ds='2000-04-08' and t2.key=1) or (t1.ds='2000-04-09' and t2.key=2) +order by t2.key, t2.value, t1.ds +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-2 depends on stages: Stage-1 + Stage-0 depends on stages: Stage-2 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: t1 + Statistics: Num rows: 40 Data size: 320 Basic stats: COMPLETE Column stats: NONE + GatherStats: false + Select Operator + expressions: key (type: int), value (type: string), ds (type: string) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 40 Data size: 320 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + null sort order: + sort order: + Statistics: Num rows: 40 Data size: 320 Basic stats: COMPLETE Column stats: NONE + tag: 0 + value expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string) + auto parallelism: false + TableScan + alias: t2 + Statistics: Num rows: 
1 Data size: 18 Basic stats: COMPLETE Column stats: NONE + GatherStats: false + Filter Operator + isSamplingPred: false + predicate: ((key = 1) or (key = 2)) (type: boolean) + Statistics: Num rows: 1 Data size: 18 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: ds (type: string), key (type: int), value (type: string) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 1 Data size: 18 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + null sort order: + sort order: + Statistics: Num rows: 1 Data size: 18 Basic stats: COMPLETE Column stats: NONE + tag: 1 + value expressions: _col0 (type: string), _col1 (type: int), _col2 (type: string) + auto parallelism: false + Path -> Alias: +#### A masked pattern was here #### + Path -> Partition: +#### A masked pattern was here #### + Partition + base file name: ds=2000-04-08 + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + partition values: + ds 2000-04-08 + properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} + bucket_count -1 + column.name.delimiter , + columns key,value + columns.comments + columns.types int:string +#### A masked pattern was here #### + name default.pcr_t1 + numFiles 1 + numRows 20 + partition_columns ds + partition_columns.types string + rawDataSize 160 + serialization.ddl struct pcr_t1 { i32 key, string value} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 180 +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + column.name.delimiter , + columns key,value + columns.comments + columns.types int:string +#### A masked pattern was here #### + name default.pcr_t1 + partition_columns ds + partition_columns.types string + serialization.ddl struct pcr_t1 { i32 key, string value} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.pcr_t1 + name: default.pcr_t1 +#### A masked pattern was here #### + Partition + base file name: ds=2000-04-09 + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + partition values: + ds 2000-04-09 + properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} + bucket_count -1 + column.name.delimiter , + columns key,value + columns.comments + columns.types int:string +#### A masked pattern was here #### + name default.pcr_t1 + numFiles 1 + numRows 20 + partition_columns ds + partition_columns.types string + rawDataSize 160 + serialization.ddl struct pcr_t1 { i32 key, string value} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 180 +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + column.name.delimiter , + columns key,value + columns.comments + columns.types int:string +#### A masked pattern was here #### + 
name default.pcr_t1 + partition_columns ds + partition_columns.types string + serialization.ddl struct pcr_t1 { i32 key, string value} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.pcr_t1 + name: default.pcr_t1 +#### A masked pattern was here #### + Partition + base file name: pcr_t2 + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"ds":"true","key":"true","value":"true"}} + bucket_count -1 + column.name.delimiter , + columns ds,key,value + columns.comments + columns.types string:int:string +#### A masked pattern was here #### + name default.pcr_t2 + numFiles 1 + numRows 1 + rawDataSize 18 + serialization.ddl struct pcr_t2 { string ds, i32 key, string value} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 19 +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"ds":"true","key":"true","value":"true"}} + bucket_count -1 + column.name.delimiter , + columns ds,key,value + columns.comments + columns.types string:int:string +#### A masked pattern was here #### + name default.pcr_t2 + numFiles 1 + numRows 1 + rawDataSize 18 + serialization.ddl struct pcr_t2 { string ds, i32 key, string value} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 19 +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.pcr_t2 + name: default.pcr_t2 + Truncated Path -> Alias: + /pcr_t1/ds=2000-04-08 [$hdt$_0:t1] + /pcr_t1/ds=2000-04-09 [$hdt$_0:t1] + /pcr_t2 [$hdt$_1:t2] + Needs Tagging: true + Reduce Operator Tree: + Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 + 1 + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + Statistics: Num rows: 40 Data size: 1080 Basic stats: COMPLETE Column stats: NONE + Filter Operator + isSamplingPred: false + predicate: (((_col2 = '2000-04-08') and (_col4 = 1)) or ((_col2 = '2000-04-09') and (_col4 = 2))) (type: boolean) + Statistics: Num rows: 20 Data size: 540 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + GlobalTableId: 0 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + properties: + column.name.delimiter , + columns _col0,_col1,_col2,_col3,_col4,_col5 + columns.types int,string,string,string,int,string + escape.delim \ + serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + TotalFiles: 1 + GatherStats: false + MultiFileSpray: false + + Stage: Stage-2 + Map Reduce + Map Operator Tree: + TableScan + GatherStats: false + Reduce Output Operator + key expressions: _col4 (type: int), _col5 (type: string), _col2 (type: string) + null sort order: aaa + sort order: +++ + Statistics: Num rows: 20 Data size: 540 Basic stats: COMPLETE 
Column stats: NONE + tag: -1 + value expressions: _col0 (type: int), _col1 (type: string), _col3 (type: string) + auto parallelism: false + Path -> Alias: +#### A masked pattern was here #### + Path -> Partition: +#### A masked pattern was here #### + Partition + base file name: -mr-10004 + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + properties: + column.name.delimiter , + columns _col0,_col1,_col2,_col3,_col4,_col5 + columns.types int,string,string,string,int,string + escape.delim \ + serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + properties: + column.name.delimiter , + columns _col0,_col1,_col2,_col3,_col4,_col5 + columns.types int,string,string,string,int,string + escape.delim \ + serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + Truncated Path -> Alias: +#### A masked pattern was here #### + Needs Tagging: false + Reduce Operator Tree: + Select Operator + expressions: VALUE._col0 (type: int), VALUE._col1 (type: string), KEY.reducesinkkey2 (type: string), VALUE._col2 (type: string), KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + Statistics: Num rows: 20 Data size: 540 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + GlobalTableId: 0 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 + Statistics: Num rows: 20 Data size: 540 Basic stats: COMPLETE Column stats: NONE +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + properties: + columns _col0,_col1,_col2,_col3,_col4,_col5 + columns.types int:string:string:string:int:string + escape.delim \ + hive.serialization.extend.additional.nesting.levels true + serialization.escape.crlf true + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + TotalFiles: 1 + GatherStats: false + MultiFileSpray: false + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +Warning: Shuffle Join JOIN[8][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product +PREHOOK: query: explain extended +select * +from pcr_t1 t1 join pcr_t2 t2 +where (t2.ds='2000-04-08' and t1.key=1) or (t2.ds='2000-04-09' and t1.key=2) +order by t1.key, t1.value, t2.ds +PREHOOK: type: QUERY +POSTHOOK: query: explain extended +select * +from pcr_t1 t1 join pcr_t2 t2 +where (t2.ds='2000-04-08' and t1.key=1) or (t2.ds='2000-04-09' and t1.key=2) +order by t1.key, t1.value, t2.ds +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-2 depends on stages: Stage-1 + Stage-0 depends on stages: Stage-2 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: t1 + Statistics: Num rows: 60 Data size: 480 Basic stats: COMPLETE Column stats: NONE + GatherStats: false + Filter Operator + isSamplingPred: false + predicate: ((key = 1) or (key = 2)) (type: boolean) + Statistics: Num rows: 60 Data size: 480 Basic stats: COMPLETE 
Column stats: NONE + Select Operator + expressions: key (type: int), value (type: string), ds (type: string) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 60 Data size: 480 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + null sort order: + sort order: + Statistics: Num rows: 60 Data size: 480 Basic stats: COMPLETE Column stats: NONE + tag: 0 + value expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string) + auto parallelism: false + TableScan + alias: t2 + Statistics: Num rows: 1 Data size: 18 Basic stats: COMPLETE Column stats: NONE + GatherStats: false + Filter Operator + isSamplingPred: false + predicate: ((ds = '2000-04-08') or (ds = '2000-04-09')) (type: boolean) + Statistics: Num rows: 1 Data size: 18 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: ds (type: string), key (type: int), value (type: string) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 1 Data size: 18 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + null sort order: + sort order: + Statistics: Num rows: 1 Data size: 18 Basic stats: COMPLETE Column stats: NONE + tag: 1 + value expressions: _col0 (type: string), _col1 (type: int), _col2 (type: string) + auto parallelism: false + Path -> Alias: +#### A masked pattern was here #### + Path -> Partition: +#### A masked pattern was here #### + Partition + base file name: ds=2000-04-08 + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + partition values: + ds 2000-04-08 + properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} + bucket_count -1 + column.name.delimiter , + columns key,value + columns.comments + columns.types int:string +#### A masked pattern was here #### + name default.pcr_t1 + numFiles 1 + numRows 20 + partition_columns ds + partition_columns.types string + rawDataSize 160 + serialization.ddl struct pcr_t1 { i32 key, string value} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 180 +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + column.name.delimiter , + columns key,value + columns.comments + columns.types int:string +#### A masked pattern was here #### + name default.pcr_t1 + partition_columns ds + partition_columns.types string + serialization.ddl struct pcr_t1 { i32 key, string value} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.pcr_t1 + name: default.pcr_t1 +#### A masked pattern was here #### + Partition + base file name: ds=2000-04-09 + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + partition values: + ds 2000-04-09 + properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} + bucket_count -1 + column.name.delimiter , + columns key,value + columns.comments + columns.types int:string +#### A masked pattern was here #### + name default.pcr_t1 + numFiles 1 + numRows 20 + partition_columns ds + partition_columns.types string + rawDataSize 160 + 
serialization.ddl struct pcr_t1 { i32 key, string value} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 180 +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + column.name.delimiter , + columns key,value + columns.comments + columns.types int:string +#### A masked pattern was here #### + name default.pcr_t1 + partition_columns ds + partition_columns.types string + serialization.ddl struct pcr_t1 { i32 key, string value} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.pcr_t1 + name: default.pcr_t1 +#### A masked pattern was here #### + Partition + base file name: ds=2000-04-10 + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + partition values: + ds 2000-04-10 + properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} + bucket_count -1 + column.name.delimiter , + columns key,value + columns.comments + columns.types int:string +#### A masked pattern was here #### + name default.pcr_t1 + numFiles 1 + numRows 20 + partition_columns ds + partition_columns.types string + rawDataSize 160 + serialization.ddl struct pcr_t1 { i32 key, string value} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 180 +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + column.name.delimiter , + columns key,value + columns.comments + columns.types int:string +#### A masked pattern was here #### + name default.pcr_t1 + partition_columns ds + partition_columns.types string + serialization.ddl struct pcr_t1 { i32 key, string value} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.pcr_t1 + name: default.pcr_t1 +#### A masked pattern was here #### + Partition + base file name: pcr_t2 + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"ds":"true","key":"true","value":"true"}} + bucket_count -1 + column.name.delimiter , + columns ds,key,value + columns.comments + columns.types string:int:string +#### A masked pattern was here #### + name default.pcr_t2 + numFiles 1 + numRows 1 + rawDataSize 18 + serialization.ddl struct pcr_t2 { string ds, i32 key, string value} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 19 +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + COLUMN_STATS_ACCURATE 
{"BASIC_STATS":"true","COLUMN_STATS":{"ds":"true","key":"true","value":"true"}} + bucket_count -1 + column.name.delimiter , + columns ds,key,value + columns.comments + columns.types string:int:string +#### A masked pattern was here #### + name default.pcr_t2 + numFiles 1 + numRows 1 + rawDataSize 18 + serialization.ddl struct pcr_t2 { string ds, i32 key, string value} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 19 +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.pcr_t2 + name: default.pcr_t2 + Truncated Path -> Alias: + /pcr_t1/ds=2000-04-08 [$hdt$_0:t1] + /pcr_t1/ds=2000-04-09 [$hdt$_0:t1] + /pcr_t1/ds=2000-04-10 [$hdt$_0:t1] + /pcr_t2 [$hdt$_1:t2] + Needs Tagging: true + Reduce Operator Tree: + Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 + 1 + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + Statistics: Num rows: 60 Data size: 1620 Basic stats: COMPLETE Column stats: NONE + Filter Operator + isSamplingPred: false + predicate: (((_col3 = '2000-04-08') and (_col0 = 1)) or ((_col3 = '2000-04-09') and (_col0 = 2))) (type: boolean) + Statistics: Num rows: 30 Data size: 810 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + GlobalTableId: 0 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + properties: + column.name.delimiter , + columns _col0,_col1,_col2,_col3,_col4,_col5 + columns.types int,string,string,string,int,string + escape.delim \ + serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + TotalFiles: 1 + GatherStats: false + MultiFileSpray: false + + Stage: Stage-2 + Map Reduce + Map Operator Tree: + TableScan + GatherStats: false + Reduce Output Operator + key expressions: _col0 (type: int), _col1 (type: string), _col3 (type: string) + null sort order: aaa + sort order: +++ + Statistics: Num rows: 30 Data size: 810 Basic stats: COMPLETE Column stats: NONE + tag: -1 + value expressions: _col2 (type: string), _col4 (type: int), _col5 (type: string) + auto parallelism: false + Path -> Alias: +#### A masked pattern was here #### + Path -> Partition: +#### A masked pattern was here #### + Partition + base file name: -mr-10004 + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + properties: + column.name.delimiter , + columns _col0,_col1,_col2,_col3,_col4,_col5 + columns.types int,string,string,string,int,string + escape.delim \ + serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + properties: + column.name.delimiter , + columns _col0,_col1,_col2,_col3,_col4,_col5 + columns.types int,string,string,string,int,string + escape.delim \ + serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + Truncated Path -> Alias: +#### A masked pattern was here #### + Needs Tagging: false + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: 
int), KEY.reducesinkkey1 (type: string), VALUE._col0 (type: string), KEY.reducesinkkey2 (type: string), VALUE._col1 (type: int), VALUE._col2 (type: string) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + Statistics: Num rows: 30 Data size: 810 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + GlobalTableId: 0 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 + Statistics: Num rows: 30 Data size: 810 Basic stats: COMPLETE Column stats: NONE +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + properties: + columns _col0,_col1,_col2,_col3,_col4,_col5 + columns.types int:string:string:string:int:string + escape.delim \ + hive.serialization.extend.additional.nesting.levels true + serialization.escape.crlf true + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + TotalFiles: 1 + GatherStats: false + MultiFileSpray: false + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select key, value, ds +from pcr_t1 +where (ds='2000-04-08' and key=1) or (ds='2000-04-09' and key=2) +order by key, value, ds +PREHOOK: type: QUERY +PREHOOK: Input: default@pcr_t1 +PREHOOK: Input: default@pcr_t1@ds=2000-04-08 +PREHOOK: Input: default@pcr_t1@ds=2000-04-09 +#### A masked pattern was here #### +POSTHOOK: query: select key, value, ds +from pcr_t1 +where (ds='2000-04-08' and key=1) or (ds='2000-04-09' and key=2) +order by key, value, ds +POSTHOOK: type: QUERY +POSTHOOK: Input: default@pcr_t1 +POSTHOOK: Input: default@pcr_t1@ds=2000-04-08 +POSTHOOK: Input: default@pcr_t1@ds=2000-04-09 +#### A masked pattern was here #### +2 val_2 2000-04-09 +PREHOOK: query: select * +from pcr_t1 t1 join pcr_t1 t2 +on t1.key=t2.key and t1.ds='2000-04-08' and t2.ds='2000-04-08' +order by t1.key +PREHOOK: type: QUERY +PREHOOK: Input: default@pcr_t1 +PREHOOK: Input: default@pcr_t1@ds=2000-04-08 +#### A masked pattern was here #### +POSTHOOK: query: select * +from pcr_t1 t1 join pcr_t1 t2 +on t1.key=t2.key and t1.ds='2000-04-08' and t2.ds='2000-04-08' +order by t1.key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@pcr_t1 +POSTHOOK: Input: default@pcr_t1@ds=2000-04-08 +#### A masked pattern was here #### +0 val_0 2000-04-08 0 val_0 2000-04-08 +0 val_0 2000-04-08 0 val_0 2000-04-08 +0 val_0 2000-04-08 0 val_0 2000-04-08 +0 val_0 2000-04-08 0 val_0 2000-04-08 +0 val_0 2000-04-08 0 val_0 2000-04-08 +0 val_0 2000-04-08 0 val_0 2000-04-08 +0 val_0 2000-04-08 0 val_0 2000-04-08 +0 val_0 2000-04-08 0 val_0 2000-04-08 +0 val_0 2000-04-08 0 val_0 2000-04-08 +2 val_2 2000-04-08 2 val_2 2000-04-08 +4 val_4 2000-04-08 4 val_4 2000-04-08 +5 val_5 2000-04-08 5 val_5 2000-04-08 +5 val_5 2000-04-08 5 val_5 2000-04-08 +5 val_5 2000-04-08 5 val_5 2000-04-08 +5 val_5 2000-04-08 5 val_5 2000-04-08 +5 val_5 2000-04-08 5 val_5 2000-04-08 +5 val_5 2000-04-08 5 val_5 2000-04-08 +5 val_5 2000-04-08 5 val_5 2000-04-08 +5 val_5 2000-04-08 5 val_5 2000-04-08 +5 val_5 2000-04-08 5 val_5 2000-04-08 +8 val_8 2000-04-08 8 val_8 2000-04-08 +9 val_9 2000-04-08 9 val_9 2000-04-08 +10 val_10 2000-04-08 10 val_10 2000-04-08 +11 val_11 2000-04-08 11 val_11 2000-04-08 +12 val_12 2000-04-08 12 val_12 2000-04-08 +12 val_12 2000-04-08 12 val_12 2000-04-08 +12 val_12 2000-04-08 12 val_12 2000-04-08 +12 val_12 2000-04-08 12 val_12 
2000-04-08 +15 val_15 2000-04-08 15 val_15 2000-04-08 +15 val_15 2000-04-08 15 val_15 2000-04-08 +15 val_15 2000-04-08 15 val_15 2000-04-08 +15 val_15 2000-04-08 15 val_15 2000-04-08 +17 val_17 2000-04-08 17 val_17 2000-04-08 +18 val_18 2000-04-08 18 val_18 2000-04-08 +18 val_18 2000-04-08 18 val_18 2000-04-08 +18 val_18 2000-04-08 18 val_18 2000-04-08 +18 val_18 2000-04-08 18 val_18 2000-04-08 +19 val_19 2000-04-08 19 val_19 2000-04-08 +Warning: Shuffle Join JOIN[8][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product +PREHOOK: query: select * +from pcr_t1 t1 join pcr_t2 t2 +where (t1.ds='2000-04-08' and t2.key=1) or (t1.ds='2000-04-09' and t2.key=2) +order by t2.key, t2.value, t1.ds +PREHOOK: type: QUERY +PREHOOK: Input: default@pcr_t1 +PREHOOK: Input: default@pcr_t1@ds=2000-04-08 +PREHOOK: Input: default@pcr_t1@ds=2000-04-09 +PREHOOK: Input: default@pcr_t2 +#### A masked pattern was here #### +POSTHOOK: query: select * +from pcr_t1 t1 join pcr_t2 t2 +where (t1.ds='2000-04-08' and t2.key=1) or (t1.ds='2000-04-09' and t2.key=2) +order by t2.key, t2.value, t1.ds +POSTHOOK: type: QUERY +POSTHOOK: Input: default@pcr_t1 +POSTHOOK: Input: default@pcr_t1@ds=2000-04-08 +POSTHOOK: Input: default@pcr_t1@ds=2000-04-09 +POSTHOOK: Input: default@pcr_t2 +#### A masked pattern was here #### +0 val_0 2000-04-09 2000-04-08 2 val_2 +0 val_0 2000-04-09 2000-04-08 2 val_2 +0 val_0 2000-04-09 2000-04-08 2 val_2 +10 val_10 2000-04-09 2000-04-08 2 val_2 +11 val_11 2000-04-09 2000-04-08 2 val_2 +12 val_12 2000-04-09 2000-04-08 2 val_2 +12 val_12 2000-04-09 2000-04-08 2 val_2 +15 val_15 2000-04-09 2000-04-08 2 val_2 +15 val_15 2000-04-09 2000-04-08 2 val_2 +17 val_17 2000-04-09 2000-04-08 2 val_2 +18 val_18 2000-04-09 2000-04-08 2 val_2 +18 val_18 2000-04-09 2000-04-08 2 val_2 +19 val_19 2000-04-09 2000-04-08 2 val_2 +2 val_2 2000-04-09 2000-04-08 2 val_2 +4 val_4 2000-04-09 2000-04-08 2 val_2 +5 val_5 2000-04-09 2000-04-08 2 val_2 +5 val_5 2000-04-09 2000-04-08 2 val_2 +5 val_5 2000-04-09 2000-04-08 2 val_2 +8 val_8 2000-04-09 2000-04-08 2 val_2 +9 val_9 2000-04-09 2000-04-08 2 val_2 +Warning: Shuffle Join JOIN[8][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product +PREHOOK: query: select * +from pcr_t1 t1 join pcr_t2 t2 +where (t1.ds='2000-04-08' and t2.key=1) or (t1.ds='2000-04-09' and t2.key=2) +order by t2.key, t2.value, t1.ds +PREHOOK: type: QUERY +PREHOOK: Input: default@pcr_t1 +PREHOOK: Input: default@pcr_t1@ds=2000-04-08 +PREHOOK: Input: default@pcr_t1@ds=2000-04-09 +PREHOOK: Input: default@pcr_t2 +#### A masked pattern was here #### +POSTHOOK: query: select * +from pcr_t1 t1 join pcr_t2 t2 +where (t1.ds='2000-04-08' and t2.key=1) or (t1.ds='2000-04-09' and t2.key=2) +order by t2.key, t2.value, t1.ds +POSTHOOK: type: QUERY +POSTHOOK: Input: default@pcr_t1 +POSTHOOK: Input: default@pcr_t1@ds=2000-04-08 +POSTHOOK: Input: default@pcr_t1@ds=2000-04-09 +POSTHOOK: Input: default@pcr_t2 +#### A masked pattern was here #### +0 val_0 2000-04-09 2000-04-08 2 val_2 +0 val_0 2000-04-09 2000-04-08 2 val_2 +0 val_0 2000-04-09 2000-04-08 2 val_2 +10 val_10 2000-04-09 2000-04-08 2 val_2 +11 val_11 2000-04-09 2000-04-08 2 val_2 +12 val_12 2000-04-09 2000-04-08 2 val_2 +12 val_12 2000-04-09 2000-04-08 2 val_2 +15 val_15 2000-04-09 2000-04-08 2 val_2 +15 val_15 2000-04-09 2000-04-08 2 val_2 +17 val_17 2000-04-09 2000-04-08 2 val_2 +18 val_18 2000-04-09 2000-04-08 2 val_2 +18 val_18 2000-04-09 2000-04-08 2 val_2 +19 val_19 2000-04-09 2000-04-08 2 val_2 +2 val_2 2000-04-09 
2000-04-08 2 val_2 +4 val_4 2000-04-09 2000-04-08 2 val_2 +5 val_5 2000-04-09 2000-04-08 2 val_2 +5 val_5 2000-04-09 2000-04-08 2 val_2 +5 val_5 2000-04-09 2000-04-08 2 val_2 +8 val_8 2000-04-09 2000-04-08 2 val_2 +9 val_9 2000-04-09 2000-04-08 2 val_2 +Warning: Shuffle Join JOIN[8][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product +PREHOOK: query: select * +from pcr_t1 t1 join pcr_t2 t2 +where (t2.ds='2000-04-08' and t1.key=1) or (t2.ds='2000-04-09' and t1.key=2) +order by t1.key, t1.value, t2.ds +PREHOOK: type: QUERY +PREHOOK: Input: default@pcr_t1 +PREHOOK: Input: default@pcr_t1@ds=2000-04-08 +PREHOOK: Input: default@pcr_t1@ds=2000-04-09 +PREHOOK: Input: default@pcr_t1@ds=2000-04-10 +PREHOOK: Input: default@pcr_t2 +#### A masked pattern was here #### +POSTHOOK: query: select * +from pcr_t1 t1 join pcr_t2 t2 +where (t2.ds='2000-04-08' and t1.key=1) or (t2.ds='2000-04-09' and t1.key=2) +order by t1.key, t1.value, t2.ds +POSTHOOK: type: QUERY +POSTHOOK: Input: default@pcr_t1 +POSTHOOK: Input: default@pcr_t1@ds=2000-04-08 +POSTHOOK: Input: default@pcr_t1@ds=2000-04-09 +POSTHOOK: Input: default@pcr_t1@ds=2000-04-10 +POSTHOOK: Input: default@pcr_t2 +#### A masked pattern was here #### +PREHOOK: query: explain extended +select key, value, ds +from pcr_t1 +where (ds='2000-04-08' and key=1) or (ds='2000-04-09' and key=2) +order by key, value, ds +PREHOOK: type: QUERY +POSTHOOK: query: explain extended +select key, value, ds +from pcr_t1 +where (ds='2000-04-08' and key=1) or (ds='2000-04-09' and key=2) +order by key, value, ds +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + STAGE PLANS: Stage: Stage-1 Map Reduce @@ -1428,6 +2948,184 @@ STAGE PLANS: Processor Tree: ListSink +PREHOOK: query: select key, value, ds +from pcr_t1 +where (ds='2000-04-08' and key=1) or (ds='2000-04-09' and key=2) +order by key, value, ds +PREHOOK: type: QUERY +PREHOOK: Input: default@pcr_t1 +PREHOOK: Input: default@pcr_t1@ds=2000-04-08 +PREHOOK: Input: default@pcr_t1@ds=2000-04-09 +#### A masked pattern was here #### +POSTHOOK: query: select key, value, ds +from pcr_t1 +where (ds='2000-04-08' and key=1) or (ds='2000-04-09' and key=2) +order by key, value, ds +POSTHOOK: type: QUERY +POSTHOOK: Input: default@pcr_t1 +POSTHOOK: Input: default@pcr_t1@ds=2000-04-08 +POSTHOOK: Input: default@pcr_t1@ds=2000-04-09 +#### A masked pattern was here #### +2 val_2 2000-04-09 +PREHOOK: query: select * +from pcr_t1 t1 join pcr_t1 t2 +on t1.key=t2.key and t1.ds='2000-04-08' and t2.ds='2000-04-08' +order by t1.key +PREHOOK: type: QUERY +PREHOOK: Input: default@pcr_t1 +PREHOOK: Input: default@pcr_t1@ds=2000-04-08 +#### A masked pattern was here #### +POSTHOOK: query: select * +from pcr_t1 t1 join pcr_t1 t2 +on t1.key=t2.key and t1.ds='2000-04-08' and t2.ds='2000-04-08' +order by t1.key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@pcr_t1 +POSTHOOK: Input: default@pcr_t1@ds=2000-04-08 +#### A masked pattern was here #### +0 val_0 2000-04-08 0 val_0 2000-04-08 +0 val_0 2000-04-08 0 val_0 2000-04-08 +0 val_0 2000-04-08 0 val_0 2000-04-08 +0 val_0 2000-04-08 0 val_0 2000-04-08 +0 val_0 2000-04-08 0 val_0 2000-04-08 +0 val_0 2000-04-08 0 val_0 2000-04-08 +0 val_0 2000-04-08 0 val_0 2000-04-08 +0 val_0 2000-04-08 0 val_0 2000-04-08 +0 val_0 2000-04-08 0 val_0 2000-04-08 +2 val_2 2000-04-08 2 val_2 2000-04-08 +4 val_4 2000-04-08 4 val_4 2000-04-08 +5 val_5 2000-04-08 5 val_5 2000-04-08 +5 val_5 2000-04-08 5 val_5 2000-04-08 +5 val_5 2000-04-08 5 val_5 
2000-04-08 +5 val_5 2000-04-08 5 val_5 2000-04-08 +5 val_5 2000-04-08 5 val_5 2000-04-08 +5 val_5 2000-04-08 5 val_5 2000-04-08 +5 val_5 2000-04-08 5 val_5 2000-04-08 +5 val_5 2000-04-08 5 val_5 2000-04-08 +5 val_5 2000-04-08 5 val_5 2000-04-08 +8 val_8 2000-04-08 8 val_8 2000-04-08 +9 val_9 2000-04-08 9 val_9 2000-04-08 +10 val_10 2000-04-08 10 val_10 2000-04-08 +11 val_11 2000-04-08 11 val_11 2000-04-08 +12 val_12 2000-04-08 12 val_12 2000-04-08 +12 val_12 2000-04-08 12 val_12 2000-04-08 +12 val_12 2000-04-08 12 val_12 2000-04-08 +12 val_12 2000-04-08 12 val_12 2000-04-08 +15 val_15 2000-04-08 15 val_15 2000-04-08 +15 val_15 2000-04-08 15 val_15 2000-04-08 +15 val_15 2000-04-08 15 val_15 2000-04-08 +15 val_15 2000-04-08 15 val_15 2000-04-08 +17 val_17 2000-04-08 17 val_17 2000-04-08 +18 val_18 2000-04-08 18 val_18 2000-04-08 +18 val_18 2000-04-08 18 val_18 2000-04-08 +18 val_18 2000-04-08 18 val_18 2000-04-08 +18 val_18 2000-04-08 18 val_18 2000-04-08 +19 val_19 2000-04-08 19 val_19 2000-04-08 +Warning: Shuffle Join JOIN[8][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product +PREHOOK: query: select * +from pcr_t1 t1 join pcr_t2 t2 +where (t1.ds='2000-04-08' and t2.key=1) or (t1.ds='2000-04-09' and t2.key=2) +order by t2.key, t2.value, t1.ds +PREHOOK: type: QUERY +PREHOOK: Input: default@pcr_t1 +PREHOOK: Input: default@pcr_t1@ds=2000-04-08 +PREHOOK: Input: default@pcr_t1@ds=2000-04-09 +PREHOOK: Input: default@pcr_t2 +#### A masked pattern was here #### +POSTHOOK: query: select * +from pcr_t1 t1 join pcr_t2 t2 +where (t1.ds='2000-04-08' and t2.key=1) or (t1.ds='2000-04-09' and t2.key=2) +order by t2.key, t2.value, t1.ds +POSTHOOK: type: QUERY +POSTHOOK: Input: default@pcr_t1 +POSTHOOK: Input: default@pcr_t1@ds=2000-04-08 +POSTHOOK: Input: default@pcr_t1@ds=2000-04-09 +POSTHOOK: Input: default@pcr_t2 +#### A masked pattern was here #### +0 val_0 2000-04-09 2000-04-08 2 val_2 +0 val_0 2000-04-09 2000-04-08 2 val_2 +0 val_0 2000-04-09 2000-04-08 2 val_2 +10 val_10 2000-04-09 2000-04-08 2 val_2 +11 val_11 2000-04-09 2000-04-08 2 val_2 +12 val_12 2000-04-09 2000-04-08 2 val_2 +12 val_12 2000-04-09 2000-04-08 2 val_2 +15 val_15 2000-04-09 2000-04-08 2 val_2 +15 val_15 2000-04-09 2000-04-08 2 val_2 +17 val_17 2000-04-09 2000-04-08 2 val_2 +18 val_18 2000-04-09 2000-04-08 2 val_2 +18 val_18 2000-04-09 2000-04-08 2 val_2 +19 val_19 2000-04-09 2000-04-08 2 val_2 +2 val_2 2000-04-09 2000-04-08 2 val_2 +4 val_4 2000-04-09 2000-04-08 2 val_2 +5 val_5 2000-04-09 2000-04-08 2 val_2 +5 val_5 2000-04-09 2000-04-08 2 val_2 +5 val_5 2000-04-09 2000-04-08 2 val_2 +8 val_8 2000-04-09 2000-04-08 2 val_2 +9 val_9 2000-04-09 2000-04-08 2 val_2 +Warning: Shuffle Join JOIN[8][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product +PREHOOK: query: select * +from pcr_t1 t1 join pcr_t2 t2 +where (t1.ds='2000-04-08' and t2.key=1) or (t1.ds='2000-04-09' and t2.key=2) +order by t2.key, t2.value, t1.ds +PREHOOK: type: QUERY +PREHOOK: Input: default@pcr_t1 +PREHOOK: Input: default@pcr_t1@ds=2000-04-08 +PREHOOK: Input: default@pcr_t1@ds=2000-04-09 +PREHOOK: Input: default@pcr_t2 +#### A masked pattern was here #### +POSTHOOK: query: select * +from pcr_t1 t1 join pcr_t2 t2 +where (t1.ds='2000-04-08' and t2.key=1) or (t1.ds='2000-04-09' and t2.key=2) +order by t2.key, t2.value, t1.ds +POSTHOOK: type: QUERY +POSTHOOK: Input: default@pcr_t1 +POSTHOOK: Input: default@pcr_t1@ds=2000-04-08 +POSTHOOK: Input: default@pcr_t1@ds=2000-04-09 +POSTHOOK: Input: default@pcr_t2 +#### A masked pattern 
was here #### +0 val_0 2000-04-09 2000-04-08 2 val_2 +0 val_0 2000-04-09 2000-04-08 2 val_2 +0 val_0 2000-04-09 2000-04-08 2 val_2 +10 val_10 2000-04-09 2000-04-08 2 val_2 +11 val_11 2000-04-09 2000-04-08 2 val_2 +12 val_12 2000-04-09 2000-04-08 2 val_2 +12 val_12 2000-04-09 2000-04-08 2 val_2 +15 val_15 2000-04-09 2000-04-08 2 val_2 +15 val_15 2000-04-09 2000-04-08 2 val_2 +17 val_17 2000-04-09 2000-04-08 2 val_2 +18 val_18 2000-04-09 2000-04-08 2 val_2 +18 val_18 2000-04-09 2000-04-08 2 val_2 +19 val_19 2000-04-09 2000-04-08 2 val_2 +2 val_2 2000-04-09 2000-04-08 2 val_2 +4 val_4 2000-04-09 2000-04-08 2 val_2 +5 val_5 2000-04-09 2000-04-08 2 val_2 +5 val_5 2000-04-09 2000-04-08 2 val_2 +5 val_5 2000-04-09 2000-04-08 2 val_2 +8 val_8 2000-04-09 2000-04-08 2 val_2 +9 val_9 2000-04-09 2000-04-08 2 val_2 +Warning: Shuffle Join JOIN[8][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product +PREHOOK: query: select * +from pcr_t1 t1 join pcr_t2 t2 +where (t2.ds='2000-04-08' and t1.key=1) or (t2.ds='2000-04-09' and t1.key=2) +order by t1.key, t1.value, t2.ds +PREHOOK: type: QUERY +PREHOOK: Input: default@pcr_t1 +PREHOOK: Input: default@pcr_t1@ds=2000-04-08 +PREHOOK: Input: default@pcr_t1@ds=2000-04-09 +PREHOOK: Input: default@pcr_t1@ds=2000-04-10 +PREHOOK: Input: default@pcr_t2 +#### A masked pattern was here #### +POSTHOOK: query: select * +from pcr_t1 t1 join pcr_t2 t2 +where (t2.ds='2000-04-08' and t1.key=1) or (t2.ds='2000-04-09' and t1.key=2) +order by t1.key, t1.value, t2.ds +POSTHOOK: type: QUERY +POSTHOOK: Input: default@pcr_t1 +POSTHOOK: Input: default@pcr_t1@ds=2000-04-08 +POSTHOOK: Input: default@pcr_t1@ds=2000-04-09 +POSTHOOK: Input: default@pcr_t1@ds=2000-04-10 +POSTHOOK: Input: default@pcr_t2 +#### A masked pattern was here #### PREHOOK: query: drop table pcr_t1 PREHOOK: type: DROPTABLE PREHOOK: Input: default@pcr_t1 diff --git ql/src/test/results/clientpositive/pointlookup3.q.out ql/src/test/results/clientpositive/pointlookup3.q.out index 0057d1df94869d820ccc21a8bbbe8b865f844e2a..3085693dfba9a892863cb6f4c869bfe9dd064157 100644 --- ql/src/test/results/clientpositive/pointlookup3.q.out +++ ql/src/test/results/clientpositive/pointlookup3.q.out @@ -56,6 +56,1347 @@ STAGE DEPENDENCIES: Stage-1 is a root stage Stage-0 depends on stages: Stage-1 +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: pcr_t1 + Statistics: Num rows: 40 Data size: 320 Basic stats: COMPLETE Column stats: NONE + GatherStats: false + Filter Operator + isSamplingPred: false + predicate: (((ds1 = '2000-04-08') and (key = 1)) or ((ds1 = '2000-04-09') and (key = 2))) (type: boolean) + Statistics: Num rows: 40 Data size: 320 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int), value (type: string), ds1 (type: string), ds2 (type: string) + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 40 Data size: 320 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col3 (type: string) + null sort order: aaaa + sort order: ++++ + Statistics: Num rows: 40 Data size: 320 Basic stats: COMPLETE Column stats: NONE + tag: -1 + auto parallelism: false + Path -> Alias: +#### A masked pattern was here #### + Path -> Partition: +#### A masked pattern was here #### + Partition + base file name: ds2=2001-04-08 + input format: org.apache.hadoop.mapred.TextInputFormat + output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + partition values: + ds1 2000-04-08 + ds2 2001-04-08 + properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} + bucket_count -1 + column.name.delimiter , + columns key,value + columns.comments + columns.types int:string +#### A masked pattern was here #### + name default.pcr_t1 + numFiles 1 + numRows 20 + partition_columns ds1/ds2 + partition_columns.types string:string + rawDataSize 160 + serialization.ddl struct pcr_t1 { i32 key, string value} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 180 +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + column.name.delimiter , + columns key,value + columns.comments + columns.types int:string +#### A masked pattern was here #### + name default.pcr_t1 + partition_columns ds1/ds2 + partition_columns.types string:string + serialization.ddl struct pcr_t1 { i32 key, string value} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.pcr_t1 + name: default.pcr_t1 +#### A masked pattern was here #### + Partition + base file name: ds2=2001-04-09 + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + partition values: + ds1 2000-04-09 + ds2 2001-04-09 + properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} + bucket_count -1 + column.name.delimiter , + columns key,value + columns.comments + columns.types int:string +#### A masked pattern was here #### + name default.pcr_t1 + numFiles 1 + numRows 20 + partition_columns ds1/ds2 + partition_columns.types string:string + rawDataSize 160 + serialization.ddl struct pcr_t1 { i32 key, string value} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 180 +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + column.name.delimiter , + columns key,value + columns.comments + columns.types int:string +#### A masked pattern was here #### + name default.pcr_t1 + partition_columns ds1/ds2 + partition_columns.types string:string + serialization.ddl struct pcr_t1 { i32 key, string value} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.pcr_t1 + name: default.pcr_t1 + Truncated Path -> Alias: + /pcr_t1/ds1=2000-04-08/ds2=2001-04-08 [pcr_t1] + /pcr_t1/ds1=2000-04-09/ds2=2001-04-09 [pcr_t1] + Needs Tagging: false + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string), KEY.reducesinkkey2 (type: string), KEY.reducesinkkey3 (type: string) + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 40 Data size: 320 Basic stats: COMPLETE Column stats: NONE + File Output Operator 
+ compressed: false + GlobalTableId: 0 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 + Statistics: Num rows: 40 Data size: 320 Basic stats: COMPLETE Column stats: NONE +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + properties: + columns _col0,_col1,_col2,_col3 + columns.types int:string:string:string + escape.delim \ + hive.serialization.extend.additional.nesting.levels true + serialization.escape.crlf true + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + TotalFiles: 1 + GatherStats: false + MultiFileSpray: false + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: explain extended +select key, value, ds1, ds2 +from pcr_t1 +where (ds1='2000-04-08' and ds2='2001-04-08' and key=1) or (ds1='2000-04-09' and ds2='2001-04-08' and key=2) +order by key, value, ds1, ds2 +PREHOOK: type: QUERY +POSTHOOK: query: explain extended +select key, value, ds1, ds2 +from pcr_t1 +where (ds1='2000-04-08' and ds2='2001-04-08' and key=1) or (ds1='2000-04-09' and ds2='2001-04-08' and key=2) +order by key, value, ds1, ds2 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: pcr_t1 + Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE + GatherStats: false + Filter Operator + isSamplingPred: false + predicate: (key = 1) (type: boolean) + Statistics: Num rows: 10 Data size: 80 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int), value (type: string), ds1 (type: string) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 10 Data size: 80 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string) + null sort order: aaa + sort order: +++ + Statistics: Num rows: 10 Data size: 80 Basic stats: COMPLETE Column stats: NONE + tag: -1 + auto parallelism: false + Path -> Alias: +#### A masked pattern was here #### + Path -> Partition: +#### A masked pattern was here #### + Partition + base file name: ds2=2001-04-08 + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + partition values: + ds1 2000-04-08 + ds2 2001-04-08 + properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} + bucket_count -1 + column.name.delimiter , + columns key,value + columns.comments + columns.types int:string +#### A masked pattern was here #### + name default.pcr_t1 + numFiles 1 + numRows 20 + partition_columns ds1/ds2 + partition_columns.types string:string + rawDataSize 160 + serialization.ddl struct pcr_t1 { i32 key, string value} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 180 +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + column.name.delimiter , + columns key,value + columns.comments + columns.types int:string +#### A masked pattern was 
here #### + name default.pcr_t1 + partition_columns ds1/ds2 + partition_columns.types string:string + serialization.ddl struct pcr_t1 { i32 key, string value} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.pcr_t1 + name: default.pcr_t1 + Truncated Path -> Alias: + /pcr_t1/ds1=2000-04-08/ds2=2001-04-08 [$hdt$_0:pcr_t1] + Needs Tagging: false + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string), KEY.reducesinkkey2 (type: string), '2001-04-08' (type: string) + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 10 Data size: 80 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + GlobalTableId: 0 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 + Statistics: Num rows: 10 Data size: 80 Basic stats: COMPLETE Column stats: NONE +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + properties: + columns _col0,_col1,_col2,_col3 + columns.types int:string:string:string + escape.delim \ + hive.serialization.extend.additional.nesting.levels true + serialization.escape.crlf true + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + TotalFiles: 1 + GatherStats: false + MultiFileSpray: false + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: explain extended +select * +from pcr_t1 t1 join pcr_t1 t2 +on t1.key=t2.key and t1.ds1='2000-04-08' and t2.ds2='2001-04-08' +order by t2.key, t2.value, t1.ds1 +PREHOOK: type: QUERY +POSTHOOK: query: explain extended +select * +from pcr_t1 t1 join pcr_t1 t2 +on t1.key=t2.key and t1.ds1='2000-04-08' and t2.ds2='2001-04-08' +order by t2.key, t2.value, t1.ds1 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-2 depends on stages: Stage-1 + Stage-0 depends on stages: Stage-2 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: t1 + Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE + GatherStats: false + Filter Operator + isSamplingPred: false + predicate: key is not null (type: boolean) + Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int), value (type: string), ds2 (type: string) + outputColumnNames: _col0, _col1, _col3 + Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + null sort order: a + sort order: + + Map-reduce partition columns: _col0 (type: int) + Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE + tag: 0 + value expressions: _col1 (type: string), _col3 (type: string) + auto parallelism: false + TableScan + alias: t2 + Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE + GatherStats: false + Filter Operator + isSamplingPred: false + predicate: key is not null (type: boolean) + Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int), value (type: string), ds1 (type: string) + outputColumnNames: _col0, 
_col1, _col2 + Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + null sort order: a + sort order: + + Map-reduce partition columns: _col0 (type: int) + Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE + tag: 1 + value expressions: _col1 (type: string), _col2 (type: string) + auto parallelism: false + Path -> Alias: +#### A masked pattern was here #### + Path -> Partition: +#### A masked pattern was here #### + Partition + base file name: ds2=2001-04-08 + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + partition values: + ds1 2000-04-08 + ds2 2001-04-08 + properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} + bucket_count -1 + column.name.delimiter , + columns key,value + columns.comments + columns.types int:string +#### A masked pattern was here #### + name default.pcr_t1 + numFiles 1 + numRows 20 + partition_columns ds1/ds2 + partition_columns.types string:string + rawDataSize 160 + serialization.ddl struct pcr_t1 { i32 key, string value} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 180 +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + column.name.delimiter , + columns key,value + columns.comments + columns.types int:string +#### A masked pattern was here #### + name default.pcr_t1 + partition_columns ds1/ds2 + partition_columns.types string:string + serialization.ddl struct pcr_t1 { i32 key, string value} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.pcr_t1 + name: default.pcr_t1 + Truncated Path -> Alias: + /pcr_t1/ds1=2000-04-08/ds2=2001-04-08 [$hdt$_0:$hdt$_0:t1, $hdt$_0:$hdt$_1:t2] + Needs Tagging: true + Reduce Operator Tree: + Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 _col0 (type: int) + 1 _col0 (type: int) + outputColumnNames: _col0, _col1, _col3, _col4, _col5, _col6 + Statistics: Num rows: 22 Data size: 176 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: _col0 (type: int), _col1 (type: string), _col3 (type: string), _col4 (type: int), _col5 (type: string), _col6 (type: string) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + Statistics: Num rows: 22 Data size: 176 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + GlobalTableId: 0 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + properties: + column.name.delimiter , + columns _col0,_col1,_col2,_col3,_col4,_col5 + columns.types int,string,string,int,string,string + escape.delim \ + serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + TotalFiles: 1 + GatherStats: false + MultiFileSpray: false + + Stage: Stage-2 + Map Reduce + Map Operator Tree: + TableScan + GatherStats: false + Reduce Output 
Operator + key expressions: _col3 (type: int), _col4 (type: string) + null sort order: aa + sort order: ++ + Statistics: Num rows: 22 Data size: 176 Basic stats: COMPLETE Column stats: NONE + tag: -1 + value expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col5 (type: string) + auto parallelism: false + Path -> Alias: +#### A masked pattern was here #### + Path -> Partition: +#### A masked pattern was here #### + Partition + base file name: -mr-10004 + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + properties: + column.name.delimiter , + columns _col0,_col1,_col2,_col3,_col4,_col5 + columns.types int,string,string,int,string,string + escape.delim \ + serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + properties: + column.name.delimiter , + columns _col0,_col1,_col2,_col3,_col4,_col5 + columns.types int,string,string,int,string,string + escape.delim \ + serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + Truncated Path -> Alias: +#### A masked pattern was here #### + Needs Tagging: false + Reduce Operator Tree: + Select Operator + expressions: VALUE._col0 (type: int), VALUE._col1 (type: string), '2000-04-08' (type: string), VALUE._col2 (type: string), KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string), VALUE._col3 (type: string), '2001-04-08' (type: string) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7 + Statistics: Num rows: 22 Data size: 176 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + GlobalTableId: 0 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 + Statistics: Num rows: 22 Data size: 176 Basic stats: COMPLETE Column stats: NONE +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + properties: + columns _col0,_col1,_col2,_col3,_col4,_col5,_col6,_col7 + columns.types int:string:string:string:int:string:string:string + escape.delim \ + hive.serialization.extend.additional.nesting.levels true + serialization.escape.crlf true + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + TotalFiles: 1 + GatherStats: false + MultiFileSpray: false + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: explain extended +select * +from pcr_t1 t1 join pcr_t1 t2 +on t1.key=t2.key and t1.ds1='2000-04-08' and t2.ds1='2000-04-09' +order by t2.key, t2.value, t1.ds1 +PREHOOK: type: QUERY +POSTHOOK: query: explain extended +select * +from pcr_t1 t1 join pcr_t1 t2 +on t1.key=t2.key and t1.ds1='2000-04-08' and t2.ds1='2000-04-09' +order by t2.key, t2.value, t1.ds1 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-2 depends on stages: Stage-1 + Stage-0 depends on stages: Stage-2 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: t1 + Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE + GatherStats: false + 
Filter Operator + isSamplingPred: false + predicate: key is not null (type: boolean) + Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int), value (type: string), ds2 (type: string) + outputColumnNames: _col0, _col1, _col3 + Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + null sort order: a + sort order: + + Map-reduce partition columns: _col0 (type: int) + Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE + tag: 0 + value expressions: _col1 (type: string), _col3 (type: string) + auto parallelism: false + TableScan + alias: t2 + Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE + GatherStats: false + Filter Operator + isSamplingPred: false + predicate: key is not null (type: boolean) + Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int), value (type: string), ds2 (type: string) + outputColumnNames: _col0, _col1, _col3 + Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + null sort order: a + sort order: + + Map-reduce partition columns: _col0 (type: int) + Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE + tag: 1 + value expressions: _col1 (type: string), _col3 (type: string) + auto parallelism: false + Path -> Alias: +#### A masked pattern was here #### + Path -> Partition: +#### A masked pattern was here #### + Partition + base file name: ds2=2001-04-08 + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + partition values: + ds1 2000-04-08 + ds2 2001-04-08 + properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} + bucket_count -1 + column.name.delimiter , + columns key,value + columns.comments + columns.types int:string +#### A masked pattern was here #### + name default.pcr_t1 + numFiles 1 + numRows 20 + partition_columns ds1/ds2 + partition_columns.types string:string + rawDataSize 160 + serialization.ddl struct pcr_t1 { i32 key, string value} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 180 +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + column.name.delimiter , + columns key,value + columns.comments + columns.types int:string +#### A masked pattern was here #### + name default.pcr_t1 + partition_columns ds1/ds2 + partition_columns.types string:string + serialization.ddl struct pcr_t1 { i32 key, string value} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.pcr_t1 + name: default.pcr_t1 +#### A masked pattern was here #### + Partition + base file name: ds2=2001-04-09 + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + partition values: + ds1 2000-04-09 + ds2 2001-04-09 + properties: + COLUMN_STATS_ACCURATE 
{"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} + bucket_count -1 + column.name.delimiter , + columns key,value + columns.comments + columns.types int:string +#### A masked pattern was here #### + name default.pcr_t1 + numFiles 1 + numRows 20 + partition_columns ds1/ds2 + partition_columns.types string:string + rawDataSize 160 + serialization.ddl struct pcr_t1 { i32 key, string value} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 180 +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + column.name.delimiter , + columns key,value + columns.comments + columns.types int:string +#### A masked pattern was here #### + name default.pcr_t1 + partition_columns ds1/ds2 + partition_columns.types string:string + serialization.ddl struct pcr_t1 { i32 key, string value} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.pcr_t1 + name: default.pcr_t1 + Truncated Path -> Alias: + /pcr_t1/ds1=2000-04-08/ds2=2001-04-08 [$hdt$_0:$hdt$_0:t1] + /pcr_t1/ds1=2000-04-09/ds2=2001-04-09 [$hdt$_0:$hdt$_1:t2] + Needs Tagging: true + Reduce Operator Tree: + Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 _col0 (type: int) + 1 _col0 (type: int) + outputColumnNames: _col0, _col1, _col3, _col4, _col5, _col7 + Statistics: Num rows: 22 Data size: 176 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: _col0 (type: int), _col1 (type: string), _col3 (type: string), _col4 (type: int), _col5 (type: string), _col7 (type: string) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + Statistics: Num rows: 22 Data size: 176 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + GlobalTableId: 0 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + properties: + column.name.delimiter , + columns _col0,_col1,_col2,_col3,_col4,_col5 + columns.types int,string,string,int,string,string + escape.delim \ + serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + TotalFiles: 1 + GatherStats: false + MultiFileSpray: false + + Stage: Stage-2 + Map Reduce + Map Operator Tree: + TableScan + GatherStats: false + Reduce Output Operator + key expressions: _col3 (type: int), _col4 (type: string) + null sort order: aa + sort order: ++ + Statistics: Num rows: 22 Data size: 176 Basic stats: COMPLETE Column stats: NONE + tag: -1 + value expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col5 (type: string) + auto parallelism: false + Path -> Alias: +#### A masked pattern was here #### + Path -> Partition: +#### A masked pattern was here #### + Partition + base file name: -mr-10004 + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + properties: + column.name.delimiter , + columns _col0,_col1,_col2,_col3,_col4,_col5 + columns.types int,string,string,int,string,string + 
escape.delim \ + serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + properties: + column.name.delimiter , + columns _col0,_col1,_col2,_col3,_col4,_col5 + columns.types int,string,string,int,string,string + escape.delim \ + serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + Truncated Path -> Alias: +#### A masked pattern was here #### + Needs Tagging: false + Reduce Operator Tree: + Select Operator + expressions: VALUE._col0 (type: int), VALUE._col1 (type: string), '2000-04-08' (type: string), VALUE._col2 (type: string), KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string), '2000-04-09' (type: string), VALUE._col3 (type: string) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7 + Statistics: Num rows: 22 Data size: 176 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + GlobalTableId: 0 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 + Statistics: Num rows: 22 Data size: 176 Basic stats: COMPLETE Column stats: NONE +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + properties: + columns _col0,_col1,_col2,_col3,_col4,_col5,_col6,_col7 + columns.types int:string:string:string:int:string:string:string + escape.delim \ + hive.serialization.extend.additional.nesting.levels true + serialization.escape.crlf true + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + TotalFiles: 1 + GatherStats: false + MultiFileSpray: false + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +Warning: Shuffle Join JOIN[8][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product +PREHOOK: query: explain extended +select * +from pcr_t1 t1 join pcr_t1 t2 +where (t1.ds1='2000-04-08' and t2.key=1) or (t1.ds1='2000-04-09' and t2.key=2) +order by t2.key, t2.value, t1.ds1 +PREHOOK: type: QUERY +POSTHOOK: query: explain extended +select * +from pcr_t1 t1 join pcr_t1 t2 +where (t1.ds1='2000-04-08' and t2.key=1) or (t1.ds1='2000-04-09' and t2.key=2) +order by t2.key, t2.value, t1.ds1 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-2 depends on stages: Stage-1 + Stage-0 depends on stages: Stage-2 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: t1 + Statistics: Num rows: 40 Data size: 320 Basic stats: COMPLETE Column stats: NONE + GatherStats: false + Select Operator + expressions: key (type: int), value (type: string), ds1 (type: string), ds2 (type: string) + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 40 Data size: 320 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + null sort order: + sort order: + Statistics: Num rows: 40 Data size: 320 Basic stats: COMPLETE Column stats: NONE + tag: 0 + value expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col3 (type: string) + auto parallelism: false + TableScan + alias: t2 + Statistics: Num rows: 60 Data size: 480 Basic stats: COMPLETE Column stats: NONE + 
GatherStats: false + Filter Operator + isSamplingPred: false + predicate: ((key = 1) or (key = 2)) (type: boolean) + Statistics: Num rows: 60 Data size: 480 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int), value (type: string), ds1 (type: string), ds2 (type: string) + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 60 Data size: 480 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + null sort order: + sort order: + Statistics: Num rows: 60 Data size: 480 Basic stats: COMPLETE Column stats: NONE + tag: 1 + value expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col3 (type: string) + auto parallelism: false + Path -> Alias: +#### A masked pattern was here #### + Path -> Partition: +#### A masked pattern was here #### + Partition + base file name: ds2=2001-04-08 + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + partition values: + ds1 2000-04-08 + ds2 2001-04-08 + properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} + bucket_count -1 + column.name.delimiter , + columns key,value + columns.comments + columns.types int:string +#### A masked pattern was here #### + name default.pcr_t1 + numFiles 1 + numRows 20 + partition_columns ds1/ds2 + partition_columns.types string:string + rawDataSize 160 + serialization.ddl struct pcr_t1 { i32 key, string value} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 180 +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + column.name.delimiter , + columns key,value + columns.comments + columns.types int:string +#### A masked pattern was here #### + name default.pcr_t1 + partition_columns ds1/ds2 + partition_columns.types string:string + serialization.ddl struct pcr_t1 { i32 key, string value} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.pcr_t1 + name: default.pcr_t1 +#### A masked pattern was here #### + Partition + base file name: ds2=2001-04-09 + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + partition values: + ds1 2000-04-09 + ds2 2001-04-09 + properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} + bucket_count -1 + column.name.delimiter , + columns key,value + columns.comments + columns.types int:string +#### A masked pattern was here #### + name default.pcr_t1 + numFiles 1 + numRows 20 + partition_columns ds1/ds2 + partition_columns.types string:string + rawDataSize 160 + serialization.ddl struct pcr_t1 { i32 key, string value} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 180 +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + column.name.delimiter , + columns key,value + 
columns.comments + columns.types int:string +#### A masked pattern was here #### + name default.pcr_t1 + partition_columns ds1/ds2 + partition_columns.types string:string + serialization.ddl struct pcr_t1 { i32 key, string value} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.pcr_t1 + name: default.pcr_t1 +#### A masked pattern was here #### + Partition + base file name: ds2=2001-04-10 + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + partition values: + ds1 2000-04-10 + ds2 2001-04-10 + properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}} + bucket_count -1 + column.name.delimiter , + columns key,value + columns.comments + columns.types int:string +#### A masked pattern was here #### + name default.pcr_t1 + numFiles 1 + numRows 20 + partition_columns ds1/ds2 + partition_columns.types string:string + rawDataSize 160 + serialization.ddl struct pcr_t1 { i32 key, string value} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 180 +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + column.name.delimiter , + columns key,value + columns.comments + columns.types int:string +#### A masked pattern was here #### + name default.pcr_t1 + partition_columns ds1/ds2 + partition_columns.types string:string + serialization.ddl struct pcr_t1 { i32 key, string value} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.pcr_t1 + name: default.pcr_t1 + Truncated Path -> Alias: + /pcr_t1/ds1=2000-04-08/ds2=2001-04-08 [$hdt$_0:t1, $hdt$_1:t2] + /pcr_t1/ds1=2000-04-09/ds2=2001-04-09 [$hdt$_0:t1, $hdt$_1:t2] + /pcr_t1/ds1=2000-04-10/ds2=2001-04-10 [$hdt$_1:t2] + Needs Tagging: true + Reduce Operator Tree: + Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 + 1 + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7 + Statistics: Num rows: 2400 Data size: 40800 Basic stats: COMPLETE Column stats: NONE + Filter Operator + isSamplingPred: false + predicate: (((_col2 = '2000-04-08') and (_col4 = 1)) or ((_col2 = '2000-04-09') and (_col4 = 2))) (type: boolean) + Statistics: Num rows: 1200 Data size: 20400 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + GlobalTableId: 0 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + properties: + column.name.delimiter , + columns _col0,_col1,_col2,_col3,_col4,_col5,_col6,_col7 + columns.types int,string,string,string,int,string,string,string + escape.delim \ + serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + TotalFiles: 1 + GatherStats: false + MultiFileSpray: false + + Stage: Stage-2 + Map Reduce + Map Operator Tree: + TableScan + GatherStats: false 
+ Reduce Output Operator + key expressions: _col4 (type: int), _col5 (type: string), _col2 (type: string) + null sort order: aaa + sort order: +++ + Statistics: Num rows: 1200 Data size: 20400 Basic stats: COMPLETE Column stats: NONE + tag: -1 + value expressions: _col0 (type: int), _col1 (type: string), _col3 (type: string), _col6 (type: string), _col7 (type: string) + auto parallelism: false + Path -> Alias: +#### A masked pattern was here #### + Path -> Partition: +#### A masked pattern was here #### + Partition + base file name: -mr-10004 + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + properties: + column.name.delimiter , + columns _col0,_col1,_col2,_col3,_col4,_col5,_col6,_col7 + columns.types int,string,string,string,int,string,string,string + escape.delim \ + serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + properties: + column.name.delimiter , + columns _col0,_col1,_col2,_col3,_col4,_col5,_col6,_col7 + columns.types int,string,string,string,int,string,string,string + escape.delim \ + serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + Truncated Path -> Alias: +#### A masked pattern was here #### + Needs Tagging: false + Reduce Operator Tree: + Select Operator + expressions: VALUE._col0 (type: int), VALUE._col1 (type: string), KEY.reducesinkkey2 (type: string), VALUE._col2 (type: string), KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string), VALUE._col3 (type: string), VALUE._col4 (type: string) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7 + Statistics: Num rows: 1200 Data size: 20400 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + GlobalTableId: 0 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 + Statistics: Num rows: 1200 Data size: 20400 Basic stats: COMPLETE Column stats: NONE +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + properties: + columns _col0,_col1,_col2,_col3,_col4,_col5,_col6,_col7 + columns.types int:string:string:string:int:string:string:string + escape.delim \ + hive.serialization.extend.additional.nesting.levels true + serialization.escape.crlf true + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + TotalFiles: 1 + GatherStats: false + MultiFileSpray: false + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select key, value, ds1, ds2 +from pcr_t1 +where (ds1='2000-04-08' and key=1) or (ds1='2000-04-09' and key=2) +order by key, value, ds1, ds2 +PREHOOK: type: QUERY +PREHOOK: Input: default@pcr_t1 +PREHOOK: Input: default@pcr_t1@ds1=2000-04-08/ds2=2001-04-08 +PREHOOK: Input: default@pcr_t1@ds1=2000-04-09/ds2=2001-04-09 +#### A masked pattern was here #### +POSTHOOK: query: select key, value, ds1, ds2 +from pcr_t1 +where (ds1='2000-04-08' and key=1) or (ds1='2000-04-09' and key=2) +order by key, value, ds1, ds2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@pcr_t1 
+POSTHOOK: Input: default@pcr_t1@ds1=2000-04-08/ds2=2001-04-08 +POSTHOOK: Input: default@pcr_t1@ds1=2000-04-09/ds2=2001-04-09 +#### A masked pattern was here #### +2 val_2 2000-04-09 2001-04-09 +PREHOOK: query: select key, value, ds1, ds2 +from pcr_t1 +where (ds1='2000-04-08' and ds2='2001-04-08' and key=1) or (ds1='2000-04-09' and ds2='2001-04-08' and key=2) +order by key, value, ds1, ds2 +PREHOOK: type: QUERY +PREHOOK: Input: default@pcr_t1 +PREHOOK: Input: default@pcr_t1@ds1=2000-04-08/ds2=2001-04-08 +#### A masked pattern was here #### +POSTHOOK: query: select key, value, ds1, ds2 +from pcr_t1 +where (ds1='2000-04-08' and ds2='2001-04-08' and key=1) or (ds1='2000-04-09' and ds2='2001-04-08' and key=2) +order by key, value, ds1, ds2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@pcr_t1 +POSTHOOK: Input: default@pcr_t1@ds1=2000-04-08/ds2=2001-04-08 +#### A masked pattern was here #### +PREHOOK: query: select * +from pcr_t1 t1 join pcr_t1 t2 +on t1.key=t2.key and t1.ds1='2000-04-08' and t2.ds2='2001-04-08' +order by t2.key, t2.value, t1.ds1 +PREHOOK: type: QUERY +PREHOOK: Input: default@pcr_t1 +PREHOOK: Input: default@pcr_t1@ds1=2000-04-08/ds2=2001-04-08 +#### A masked pattern was here #### +POSTHOOK: query: select * +from pcr_t1 t1 join pcr_t1 t2 +on t1.key=t2.key and t1.ds1='2000-04-08' and t2.ds2='2001-04-08' +order by t2.key, t2.value, t1.ds1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@pcr_t1 +POSTHOOK: Input: default@pcr_t1@ds1=2000-04-08/ds2=2001-04-08 +#### A masked pattern was here #### +0 val_0 2000-04-08 2001-04-08 0 val_0 2000-04-08 2001-04-08 +0 val_0 2000-04-08 2001-04-08 0 val_0 2000-04-08 2001-04-08 +0 val_0 2000-04-08 2001-04-08 0 val_0 2000-04-08 2001-04-08 +0 val_0 2000-04-08 2001-04-08 0 val_0 2000-04-08 2001-04-08 +0 val_0 2000-04-08 2001-04-08 0 val_0 2000-04-08 2001-04-08 +0 val_0 2000-04-08 2001-04-08 0 val_0 2000-04-08 2001-04-08 +0 val_0 2000-04-08 2001-04-08 0 val_0 2000-04-08 2001-04-08 +0 val_0 2000-04-08 2001-04-08 0 val_0 2000-04-08 2001-04-08 +0 val_0 2000-04-08 2001-04-08 0 val_0 2000-04-08 2001-04-08 +2 val_2 2000-04-08 2001-04-08 2 val_2 2000-04-08 2001-04-08 +4 val_4 2000-04-08 2001-04-08 4 val_4 2000-04-08 2001-04-08 +5 val_5 2000-04-08 2001-04-08 5 val_5 2000-04-08 2001-04-08 +5 val_5 2000-04-08 2001-04-08 5 val_5 2000-04-08 2001-04-08 +5 val_5 2000-04-08 2001-04-08 5 val_5 2000-04-08 2001-04-08 +5 val_5 2000-04-08 2001-04-08 5 val_5 2000-04-08 2001-04-08 +5 val_5 2000-04-08 2001-04-08 5 val_5 2000-04-08 2001-04-08 +5 val_5 2000-04-08 2001-04-08 5 val_5 2000-04-08 2001-04-08 +5 val_5 2000-04-08 2001-04-08 5 val_5 2000-04-08 2001-04-08 +5 val_5 2000-04-08 2001-04-08 5 val_5 2000-04-08 2001-04-08 +5 val_5 2000-04-08 2001-04-08 5 val_5 2000-04-08 2001-04-08 +8 val_8 2000-04-08 2001-04-08 8 val_8 2000-04-08 2001-04-08 +9 val_9 2000-04-08 2001-04-08 9 val_9 2000-04-08 2001-04-08 +10 val_10 2000-04-08 2001-04-08 10 val_10 2000-04-08 2001-04-08 +11 val_11 2000-04-08 2001-04-08 11 val_11 2000-04-08 2001-04-08 +12 val_12 2000-04-08 2001-04-08 12 val_12 2000-04-08 2001-04-08 +12 val_12 2000-04-08 2001-04-08 12 val_12 2000-04-08 2001-04-08 +12 val_12 2000-04-08 2001-04-08 12 val_12 2000-04-08 2001-04-08 +12 val_12 2000-04-08 2001-04-08 12 val_12 2000-04-08 2001-04-08 +15 val_15 2000-04-08 2001-04-08 15 val_15 2000-04-08 2001-04-08 +15 val_15 2000-04-08 2001-04-08 15 val_15 2000-04-08 2001-04-08 +15 val_15 2000-04-08 2001-04-08 15 val_15 2000-04-08 2001-04-08 +15 val_15 2000-04-08 2001-04-08 15 val_15 2000-04-08 2001-04-08 +17 val_17 2000-04-08 2001-04-08 
17 val_17 2000-04-08 2001-04-08 +18 val_18 2000-04-08 2001-04-08 18 val_18 2000-04-08 2001-04-08 +18 val_18 2000-04-08 2001-04-08 18 val_18 2000-04-08 2001-04-08 +18 val_18 2000-04-08 2001-04-08 18 val_18 2000-04-08 2001-04-08 +18 val_18 2000-04-08 2001-04-08 18 val_18 2000-04-08 2001-04-08 +19 val_19 2000-04-08 2001-04-08 19 val_19 2000-04-08 2001-04-08 +PREHOOK: query: select * +from pcr_t1 t1 join pcr_t1 t2 +on t1.key=t2.key and t1.ds1='2000-04-08' and t2.ds1='2000-04-09' +order by t2.key, t2.value, t1.ds1 +PREHOOK: type: QUERY +PREHOOK: Input: default@pcr_t1 +PREHOOK: Input: default@pcr_t1@ds1=2000-04-08/ds2=2001-04-08 +PREHOOK: Input: default@pcr_t1@ds1=2000-04-09/ds2=2001-04-09 +#### A masked pattern was here #### +POSTHOOK: query: select * +from pcr_t1 t1 join pcr_t1 t2 +on t1.key=t2.key and t1.ds1='2000-04-08' and t2.ds1='2000-04-09' +order by t2.key, t2.value, t1.ds1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@pcr_t1 +POSTHOOK: Input: default@pcr_t1@ds1=2000-04-08/ds2=2001-04-08 +POSTHOOK: Input: default@pcr_t1@ds1=2000-04-09/ds2=2001-04-09 +#### A masked pattern was here #### +0 val_0 2000-04-08 2001-04-08 0 val_0 2000-04-09 2001-04-09 +0 val_0 2000-04-08 2001-04-08 0 val_0 2000-04-09 2001-04-09 +0 val_0 2000-04-08 2001-04-08 0 val_0 2000-04-09 2001-04-09 +0 val_0 2000-04-08 2001-04-08 0 val_0 2000-04-09 2001-04-09 +0 val_0 2000-04-08 2001-04-08 0 val_0 2000-04-09 2001-04-09 +0 val_0 2000-04-08 2001-04-08 0 val_0 2000-04-09 2001-04-09 +0 val_0 2000-04-08 2001-04-08 0 val_0 2000-04-09 2001-04-09 +0 val_0 2000-04-08 2001-04-08 0 val_0 2000-04-09 2001-04-09 +0 val_0 2000-04-08 2001-04-08 0 val_0 2000-04-09 2001-04-09 +2 val_2 2000-04-08 2001-04-08 2 val_2 2000-04-09 2001-04-09 +4 val_4 2000-04-08 2001-04-08 4 val_4 2000-04-09 2001-04-09 +5 val_5 2000-04-08 2001-04-08 5 val_5 2000-04-09 2001-04-09 +5 val_5 2000-04-08 2001-04-08 5 val_5 2000-04-09 2001-04-09 +5 val_5 2000-04-08 2001-04-08 5 val_5 2000-04-09 2001-04-09 +5 val_5 2000-04-08 2001-04-08 5 val_5 2000-04-09 2001-04-09 +5 val_5 2000-04-08 2001-04-08 5 val_5 2000-04-09 2001-04-09 +5 val_5 2000-04-08 2001-04-08 5 val_5 2000-04-09 2001-04-09 +5 val_5 2000-04-08 2001-04-08 5 val_5 2000-04-09 2001-04-09 +5 val_5 2000-04-08 2001-04-08 5 val_5 2000-04-09 2001-04-09 +5 val_5 2000-04-08 2001-04-08 5 val_5 2000-04-09 2001-04-09 +8 val_8 2000-04-08 2001-04-08 8 val_8 2000-04-09 2001-04-09 +9 val_9 2000-04-08 2001-04-08 9 val_9 2000-04-09 2001-04-09 +10 val_10 2000-04-08 2001-04-08 10 val_10 2000-04-09 2001-04-09 +11 val_11 2000-04-08 2001-04-08 11 val_11 2000-04-09 2001-04-09 +12 val_12 2000-04-08 2001-04-08 12 val_12 2000-04-09 2001-04-09 +12 val_12 2000-04-08 2001-04-08 12 val_12 2000-04-09 2001-04-09 +12 val_12 2000-04-08 2001-04-08 12 val_12 2000-04-09 2001-04-09 +12 val_12 2000-04-08 2001-04-08 12 val_12 2000-04-09 2001-04-09 +15 val_15 2000-04-08 2001-04-08 15 val_15 2000-04-09 2001-04-09 +15 val_15 2000-04-08 2001-04-08 15 val_15 2000-04-09 2001-04-09 +15 val_15 2000-04-08 2001-04-08 15 val_15 2000-04-09 2001-04-09 +15 val_15 2000-04-08 2001-04-08 15 val_15 2000-04-09 2001-04-09 +17 val_17 2000-04-08 2001-04-08 17 val_17 2000-04-09 2001-04-09 +18 val_18 2000-04-08 2001-04-08 18 val_18 2000-04-09 2001-04-09 +18 val_18 2000-04-08 2001-04-08 18 val_18 2000-04-09 2001-04-09 +18 val_18 2000-04-08 2001-04-08 18 val_18 2000-04-09 2001-04-09 +18 val_18 2000-04-08 2001-04-08 18 val_18 2000-04-09 2001-04-09 +19 val_19 2000-04-08 2001-04-08 19 val_19 2000-04-09 2001-04-09 +Warning: Shuffle Join JOIN[8][tables = [$hdt$_0, $hdt$_1]] in 
Stage 'Stage-1:MAPRED' is a cross product +PREHOOK: query: select * +from pcr_t1 t1 join pcr_t1 t2 +where (t1.ds1='2000-04-08' and t2.key=1) or (t1.ds1='2000-04-09' and t2.key=2) +order by t2.key, t2.value, t1.ds1 +PREHOOK: type: QUERY +PREHOOK: Input: default@pcr_t1 +PREHOOK: Input: default@pcr_t1@ds1=2000-04-08/ds2=2001-04-08 +PREHOOK: Input: default@pcr_t1@ds1=2000-04-09/ds2=2001-04-09 +PREHOOK: Input: default@pcr_t1@ds1=2000-04-10/ds2=2001-04-10 +#### A masked pattern was here #### +POSTHOOK: query: select * +from pcr_t1 t1 join pcr_t1 t2 +where (t1.ds1='2000-04-08' and t2.key=1) or (t1.ds1='2000-04-09' and t2.key=2) +order by t2.key, t2.value, t1.ds1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@pcr_t1 +POSTHOOK: Input: default@pcr_t1@ds1=2000-04-08/ds2=2001-04-08 +POSTHOOK: Input: default@pcr_t1@ds1=2000-04-09/ds2=2001-04-09 +POSTHOOK: Input: default@pcr_t1@ds1=2000-04-10/ds2=2001-04-10 +#### A masked pattern was here #### +0 val_0 2000-04-09 2001-04-09 2 val_2 2000-04-10 2001-04-10 +0 val_0 2000-04-09 2001-04-09 2 val_2 2000-04-08 2001-04-08 +0 val_0 2000-04-09 2001-04-09 2 val_2 2000-04-09 2001-04-09 +0 val_0 2000-04-09 2001-04-09 2 val_2 2000-04-10 2001-04-10 +0 val_0 2000-04-09 2001-04-09 2 val_2 2000-04-08 2001-04-08 +0 val_0 2000-04-09 2001-04-09 2 val_2 2000-04-09 2001-04-09 +0 val_0 2000-04-09 2001-04-09 2 val_2 2000-04-10 2001-04-10 +0 val_0 2000-04-09 2001-04-09 2 val_2 2000-04-08 2001-04-08 +0 val_0 2000-04-09 2001-04-09 2 val_2 2000-04-09 2001-04-09 +10 val_10 2000-04-09 2001-04-09 2 val_2 2000-04-10 2001-04-10 +10 val_10 2000-04-09 2001-04-09 2 val_2 2000-04-08 2001-04-08 +10 val_10 2000-04-09 2001-04-09 2 val_2 2000-04-09 2001-04-09 +11 val_11 2000-04-09 2001-04-09 2 val_2 2000-04-10 2001-04-10 +11 val_11 2000-04-09 2001-04-09 2 val_2 2000-04-08 2001-04-08 +11 val_11 2000-04-09 2001-04-09 2 val_2 2000-04-09 2001-04-09 +12 val_12 2000-04-09 2001-04-09 2 val_2 2000-04-10 2001-04-10 +12 val_12 2000-04-09 2001-04-09 2 val_2 2000-04-08 2001-04-08 +12 val_12 2000-04-09 2001-04-09 2 val_2 2000-04-09 2001-04-09 +12 val_12 2000-04-09 2001-04-09 2 val_2 2000-04-10 2001-04-10 +12 val_12 2000-04-09 2001-04-09 2 val_2 2000-04-08 2001-04-08 +12 val_12 2000-04-09 2001-04-09 2 val_2 2000-04-09 2001-04-09 +15 val_15 2000-04-09 2001-04-09 2 val_2 2000-04-10 2001-04-10 +15 val_15 2000-04-09 2001-04-09 2 val_2 2000-04-08 2001-04-08 +15 val_15 2000-04-09 2001-04-09 2 val_2 2000-04-09 2001-04-09 +15 val_15 2000-04-09 2001-04-09 2 val_2 2000-04-10 2001-04-10 +15 val_15 2000-04-09 2001-04-09 2 val_2 2000-04-08 2001-04-08 +15 val_15 2000-04-09 2001-04-09 2 val_2 2000-04-09 2001-04-09 +17 val_17 2000-04-09 2001-04-09 2 val_2 2000-04-10 2001-04-10 +17 val_17 2000-04-09 2001-04-09 2 val_2 2000-04-08 2001-04-08 +17 val_17 2000-04-09 2001-04-09 2 val_2 2000-04-09 2001-04-09 +18 val_18 2000-04-09 2001-04-09 2 val_2 2000-04-10 2001-04-10 +18 val_18 2000-04-09 2001-04-09 2 val_2 2000-04-08 2001-04-08 +18 val_18 2000-04-09 2001-04-09 2 val_2 2000-04-09 2001-04-09 +18 val_18 2000-04-09 2001-04-09 2 val_2 2000-04-10 2001-04-10 +18 val_18 2000-04-09 2001-04-09 2 val_2 2000-04-08 2001-04-08 +18 val_18 2000-04-09 2001-04-09 2 val_2 2000-04-09 2001-04-09 +19 val_19 2000-04-09 2001-04-09 2 val_2 2000-04-10 2001-04-10 +19 val_19 2000-04-09 2001-04-09 2 val_2 2000-04-08 2001-04-08 +19 val_19 2000-04-09 2001-04-09 2 val_2 2000-04-09 2001-04-09 +2 val_2 2000-04-09 2001-04-09 2 val_2 2000-04-10 2001-04-10 +2 val_2 2000-04-09 2001-04-09 2 val_2 2000-04-08 2001-04-08 +2 val_2 2000-04-09 2001-04-09 2 val_2 
2000-04-09 2001-04-09 +4 val_4 2000-04-09 2001-04-09 2 val_2 2000-04-10 2001-04-10 +4 val_4 2000-04-09 2001-04-09 2 val_2 2000-04-08 2001-04-08 +4 val_4 2000-04-09 2001-04-09 2 val_2 2000-04-09 2001-04-09 +5 val_5 2000-04-09 2001-04-09 2 val_2 2000-04-10 2001-04-10 +5 val_5 2000-04-09 2001-04-09 2 val_2 2000-04-08 2001-04-08 +5 val_5 2000-04-09 2001-04-09 2 val_2 2000-04-09 2001-04-09 +5 val_5 2000-04-09 2001-04-09 2 val_2 2000-04-10 2001-04-10 +5 val_5 2000-04-09 2001-04-09 2 val_2 2000-04-08 2001-04-08 +5 val_5 2000-04-09 2001-04-09 2 val_2 2000-04-09 2001-04-09 +5 val_5 2000-04-09 2001-04-09 2 val_2 2000-04-10 2001-04-10 +5 val_5 2000-04-09 2001-04-09 2 val_2 2000-04-08 2001-04-08 +5 val_5 2000-04-09 2001-04-09 2 val_2 2000-04-09 2001-04-09 +8 val_8 2000-04-09 2001-04-09 2 val_2 2000-04-10 2001-04-10 +8 val_8 2000-04-09 2001-04-09 2 val_2 2000-04-08 2001-04-08 +8 val_8 2000-04-09 2001-04-09 2 val_2 2000-04-09 2001-04-09 +9 val_9 2000-04-09 2001-04-09 2 val_2 2000-04-10 2001-04-10 +9 val_9 2000-04-09 2001-04-09 2 val_2 2000-04-08 2001-04-08 +9 val_9 2000-04-09 2001-04-09 2 val_2 2000-04-09 2001-04-09 +PREHOOK: query: explain extended +select key, value, ds1, ds2 +from pcr_t1 +where (ds1='2000-04-08' and key=1) or (ds1='2000-04-09' and key=2) +order by key, value, ds1, ds2 +PREHOOK: type: QUERY +POSTHOOK: query: explain extended +select key, value, ds1, ds2 +from pcr_t1 +where (ds1='2000-04-08' and key=1) or (ds1='2000-04-09' and key=2) +order by key, value, ds1, ds2 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + STAGE PLANS: Stage: Stage-1 Map Reduce @@ -347,13 +1688,13 @@ PREHOOK: query: explain extended select * from pcr_t1 t1 join pcr_t1 t2 on t1.key=t2.key and t1.ds1='2000-04-08' and t2.ds2='2001-04-08' -order by t1.key +order by t2.key, t2.value, t1.ds1 PREHOOK: type: QUERY POSTHOOK: query: explain extended select * from pcr_t1 t1 join pcr_t1 t2 on t1.key=t2.key and t1.ds1='2000-04-08' and t2.ds2='2001-04-08' -order by t1.key +order by t2.key, t2.value, t1.ds1 POSTHOOK: type: QUERY STAGE DEPENDENCIES: Stage-1 is a root stage @@ -498,12 +1839,12 @@ STAGE PLANS: TableScan GatherStats: false Reduce Output Operator - key expressions: _col0 (type: int) - null sort order: a - sort order: + + key expressions: _col3 (type: int), _col4 (type: string) + null sort order: aa + sort order: ++ Statistics: Num rows: 22 Data size: 176 Basic stats: COMPLETE Column stats: NONE tag: -1 - value expressions: _col1 (type: string), _col2 (type: string), _col3 (type: int), _col4 (type: string), _col5 (type: string) + value expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col5 (type: string) auto parallelism: false Path -> Alias: #### A masked pattern was here #### @@ -535,7 +1876,7 @@ STAGE PLANS: Needs Tagging: false Reduce Operator Tree: Select Operator - expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: string), '2000-04-08' (type: string), VALUE._col1 (type: string), VALUE._col2 (type: int), VALUE._col3 (type: string), VALUE._col4 (type: string), '2001-04-08' (type: string) + expressions: VALUE._col0 (type: int), VALUE._col1 (type: string), '2000-04-08' (type: string), VALUE._col2 (type: string), KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string), VALUE._col3 (type: string), '2001-04-08' (type: string) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7 Statistics: Num rows: 22 Data size: 176 Basic stats: COMPLETE Column stats: NONE File Output Operator @@ -571,13 
+1912,13 @@ PREHOOK: query: explain extended select * from pcr_t1 t1 join pcr_t1 t2 on t1.key=t2.key and t1.ds1='2000-04-08' and t2.ds1='2000-04-09' -order by t1.key +order by t2.key, t2.value, t1.ds1 PREHOOK: type: QUERY POSTHOOK: query: explain extended select * from pcr_t1 t1 join pcr_t1 t2 on t1.key=t2.key and t1.ds1='2000-04-08' and t2.ds1='2000-04-09' -order by t1.key +order by t2.key, t2.value, t1.ds1 POSTHOOK: type: QUERY STAGE DEPENDENCIES: Stage-1 is a root stage @@ -771,12 +2112,12 @@ STAGE PLANS: TableScan GatherStats: false Reduce Output Operator - key expressions: _col0 (type: int) - null sort order: a - sort order: + + key expressions: _col3 (type: int), _col4 (type: string) + null sort order: aa + sort order: ++ Statistics: Num rows: 22 Data size: 176 Basic stats: COMPLETE Column stats: NONE tag: -1 - value expressions: _col1 (type: string), _col2 (type: string), _col3 (type: int), _col4 (type: string), _col5 (type: string) + value expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col5 (type: string) auto parallelism: false Path -> Alias: #### A masked pattern was here #### @@ -808,7 +2149,7 @@ STAGE PLANS: Needs Tagging: false Reduce Operator Tree: Select Operator - expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: string), '2000-04-08' (type: string), VALUE._col1 (type: string), VALUE._col2 (type: int), VALUE._col3 (type: string), '2000-04-09' (type: string), VALUE._col4 (type: string) + expressions: VALUE._col0 (type: int), VALUE._col1 (type: string), '2000-04-08' (type: string), VALUE._col2 (type: string), KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string), '2000-04-09' (type: string), VALUE._col3 (type: string) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7 Statistics: Num rows: 22 Data size: 176 Basic stats: COMPLETE Column stats: NONE File Output Operator @@ -1155,6 +2496,232 @@ STAGE PLANS: Processor Tree: ListSink +PREHOOK: query: select key, value, ds1, ds2 +from pcr_t1 +where (ds1='2000-04-08' and key=1) or (ds1='2000-04-09' and key=2) +order by key, value, ds1, ds2 +PREHOOK: type: QUERY +PREHOOK: Input: default@pcr_t1 +PREHOOK: Input: default@pcr_t1@ds1=2000-04-08/ds2=2001-04-08 +PREHOOK: Input: default@pcr_t1@ds1=2000-04-09/ds2=2001-04-09 +#### A masked pattern was here #### +POSTHOOK: query: select key, value, ds1, ds2 +from pcr_t1 +where (ds1='2000-04-08' and key=1) or (ds1='2000-04-09' and key=2) +order by key, value, ds1, ds2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@pcr_t1 +POSTHOOK: Input: default@pcr_t1@ds1=2000-04-08/ds2=2001-04-08 +POSTHOOK: Input: default@pcr_t1@ds1=2000-04-09/ds2=2001-04-09 +#### A masked pattern was here #### +2 val_2 2000-04-09 2001-04-09 +PREHOOK: query: select key, value, ds1, ds2 +from pcr_t1 +where (ds1='2000-04-08' and ds2='2001-04-08' and key=1) or (ds1='2000-04-09' and ds2='2001-04-08' and key=2) +order by key, value, ds1, ds2 +PREHOOK: type: QUERY +PREHOOK: Input: default@pcr_t1 +PREHOOK: Input: default@pcr_t1@ds1=2000-04-08/ds2=2001-04-08 +#### A masked pattern was here #### +POSTHOOK: query: select key, value, ds1, ds2 +from pcr_t1 +where (ds1='2000-04-08' and ds2='2001-04-08' and key=1) or (ds1='2000-04-09' and ds2='2001-04-08' and key=2) +order by key, value, ds1, ds2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@pcr_t1 +POSTHOOK: Input: default@pcr_t1@ds1=2000-04-08/ds2=2001-04-08 +#### A masked pattern was here #### +PREHOOK: query: select * +from pcr_t1 t1 join pcr_t1 t2 +on t1.key=t2.key and t1.ds1='2000-04-08' and 
t2.ds2='2001-04-08' +order by t2.key, t2.value, t1.ds1 +PREHOOK: type: QUERY +PREHOOK: Input: default@pcr_t1 +PREHOOK: Input: default@pcr_t1@ds1=2000-04-08/ds2=2001-04-08 +#### A masked pattern was here #### +POSTHOOK: query: select * +from pcr_t1 t1 join pcr_t1 t2 +on t1.key=t2.key and t1.ds1='2000-04-08' and t2.ds2='2001-04-08' +order by t2.key, t2.value, t1.ds1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@pcr_t1 +POSTHOOK: Input: default@pcr_t1@ds1=2000-04-08/ds2=2001-04-08 +#### A masked pattern was here #### +0 val_0 2000-04-08 2001-04-08 0 val_0 2000-04-08 2001-04-08 +0 val_0 2000-04-08 2001-04-08 0 val_0 2000-04-08 2001-04-08 +0 val_0 2000-04-08 2001-04-08 0 val_0 2000-04-08 2001-04-08 +0 val_0 2000-04-08 2001-04-08 0 val_0 2000-04-08 2001-04-08 +0 val_0 2000-04-08 2001-04-08 0 val_0 2000-04-08 2001-04-08 +0 val_0 2000-04-08 2001-04-08 0 val_0 2000-04-08 2001-04-08 +0 val_0 2000-04-08 2001-04-08 0 val_0 2000-04-08 2001-04-08 +0 val_0 2000-04-08 2001-04-08 0 val_0 2000-04-08 2001-04-08 +0 val_0 2000-04-08 2001-04-08 0 val_0 2000-04-08 2001-04-08 +2 val_2 2000-04-08 2001-04-08 2 val_2 2000-04-08 2001-04-08 +4 val_4 2000-04-08 2001-04-08 4 val_4 2000-04-08 2001-04-08 +5 val_5 2000-04-08 2001-04-08 5 val_5 2000-04-08 2001-04-08 +5 val_5 2000-04-08 2001-04-08 5 val_5 2000-04-08 2001-04-08 +5 val_5 2000-04-08 2001-04-08 5 val_5 2000-04-08 2001-04-08 +5 val_5 2000-04-08 2001-04-08 5 val_5 2000-04-08 2001-04-08 +5 val_5 2000-04-08 2001-04-08 5 val_5 2000-04-08 2001-04-08 +5 val_5 2000-04-08 2001-04-08 5 val_5 2000-04-08 2001-04-08 +5 val_5 2000-04-08 2001-04-08 5 val_5 2000-04-08 2001-04-08 +5 val_5 2000-04-08 2001-04-08 5 val_5 2000-04-08 2001-04-08 +5 val_5 2000-04-08 2001-04-08 5 val_5 2000-04-08 2001-04-08 +8 val_8 2000-04-08 2001-04-08 8 val_8 2000-04-08 2001-04-08 +9 val_9 2000-04-08 2001-04-08 9 val_9 2000-04-08 2001-04-08 +10 val_10 2000-04-08 2001-04-08 10 val_10 2000-04-08 2001-04-08 +11 val_11 2000-04-08 2001-04-08 11 val_11 2000-04-08 2001-04-08 +12 val_12 2000-04-08 2001-04-08 12 val_12 2000-04-08 2001-04-08 +12 val_12 2000-04-08 2001-04-08 12 val_12 2000-04-08 2001-04-08 +12 val_12 2000-04-08 2001-04-08 12 val_12 2000-04-08 2001-04-08 +12 val_12 2000-04-08 2001-04-08 12 val_12 2000-04-08 2001-04-08 +15 val_15 2000-04-08 2001-04-08 15 val_15 2000-04-08 2001-04-08 +15 val_15 2000-04-08 2001-04-08 15 val_15 2000-04-08 2001-04-08 +15 val_15 2000-04-08 2001-04-08 15 val_15 2000-04-08 2001-04-08 +15 val_15 2000-04-08 2001-04-08 15 val_15 2000-04-08 2001-04-08 +17 val_17 2000-04-08 2001-04-08 17 val_17 2000-04-08 2001-04-08 +18 val_18 2000-04-08 2001-04-08 18 val_18 2000-04-08 2001-04-08 +18 val_18 2000-04-08 2001-04-08 18 val_18 2000-04-08 2001-04-08 +18 val_18 2000-04-08 2001-04-08 18 val_18 2000-04-08 2001-04-08 +18 val_18 2000-04-08 2001-04-08 18 val_18 2000-04-08 2001-04-08 +19 val_19 2000-04-08 2001-04-08 19 val_19 2000-04-08 2001-04-08 +PREHOOK: query: select * +from pcr_t1 t1 join pcr_t1 t2 +on t1.key=t2.key and t1.ds1='2000-04-08' and t2.ds1='2000-04-09' +order by t2.key, t2.value, t1.ds1 +PREHOOK: type: QUERY +PREHOOK: Input: default@pcr_t1 +PREHOOK: Input: default@pcr_t1@ds1=2000-04-08/ds2=2001-04-08 +PREHOOK: Input: default@pcr_t1@ds1=2000-04-09/ds2=2001-04-09 +#### A masked pattern was here #### +POSTHOOK: query: select * +from pcr_t1 t1 join pcr_t1 t2 +on t1.key=t2.key and t1.ds1='2000-04-08' and t2.ds1='2000-04-09' +order by t2.key, t2.value, t1.ds1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@pcr_t1 +POSTHOOK: Input: 
default@pcr_t1@ds1=2000-04-08/ds2=2001-04-08 +POSTHOOK: Input: default@pcr_t1@ds1=2000-04-09/ds2=2001-04-09 +#### A masked pattern was here #### +0 val_0 2000-04-08 2001-04-08 0 val_0 2000-04-09 2001-04-09 +0 val_0 2000-04-08 2001-04-08 0 val_0 2000-04-09 2001-04-09 +0 val_0 2000-04-08 2001-04-08 0 val_0 2000-04-09 2001-04-09 +0 val_0 2000-04-08 2001-04-08 0 val_0 2000-04-09 2001-04-09 +0 val_0 2000-04-08 2001-04-08 0 val_0 2000-04-09 2001-04-09 +0 val_0 2000-04-08 2001-04-08 0 val_0 2000-04-09 2001-04-09 +0 val_0 2000-04-08 2001-04-08 0 val_0 2000-04-09 2001-04-09 +0 val_0 2000-04-08 2001-04-08 0 val_0 2000-04-09 2001-04-09 +0 val_0 2000-04-08 2001-04-08 0 val_0 2000-04-09 2001-04-09 +2 val_2 2000-04-08 2001-04-08 2 val_2 2000-04-09 2001-04-09 +4 val_4 2000-04-08 2001-04-08 4 val_4 2000-04-09 2001-04-09 +5 val_5 2000-04-08 2001-04-08 5 val_5 2000-04-09 2001-04-09 +5 val_5 2000-04-08 2001-04-08 5 val_5 2000-04-09 2001-04-09 +5 val_5 2000-04-08 2001-04-08 5 val_5 2000-04-09 2001-04-09 +5 val_5 2000-04-08 2001-04-08 5 val_5 2000-04-09 2001-04-09 +5 val_5 2000-04-08 2001-04-08 5 val_5 2000-04-09 2001-04-09 +5 val_5 2000-04-08 2001-04-08 5 val_5 2000-04-09 2001-04-09 +5 val_5 2000-04-08 2001-04-08 5 val_5 2000-04-09 2001-04-09 +5 val_5 2000-04-08 2001-04-08 5 val_5 2000-04-09 2001-04-09 +5 val_5 2000-04-08 2001-04-08 5 val_5 2000-04-09 2001-04-09 +8 val_8 2000-04-08 2001-04-08 8 val_8 2000-04-09 2001-04-09 +9 val_9 2000-04-08 2001-04-08 9 val_9 2000-04-09 2001-04-09 +10 val_10 2000-04-08 2001-04-08 10 val_10 2000-04-09 2001-04-09 +11 val_11 2000-04-08 2001-04-08 11 val_11 2000-04-09 2001-04-09 +12 val_12 2000-04-08 2001-04-08 12 val_12 2000-04-09 2001-04-09 +12 val_12 2000-04-08 2001-04-08 12 val_12 2000-04-09 2001-04-09 +12 val_12 2000-04-08 2001-04-08 12 val_12 2000-04-09 2001-04-09 +12 val_12 2000-04-08 2001-04-08 12 val_12 2000-04-09 2001-04-09 +15 val_15 2000-04-08 2001-04-08 15 val_15 2000-04-09 2001-04-09 +15 val_15 2000-04-08 2001-04-08 15 val_15 2000-04-09 2001-04-09 +15 val_15 2000-04-08 2001-04-08 15 val_15 2000-04-09 2001-04-09 +15 val_15 2000-04-08 2001-04-08 15 val_15 2000-04-09 2001-04-09 +17 val_17 2000-04-08 2001-04-08 17 val_17 2000-04-09 2001-04-09 +18 val_18 2000-04-08 2001-04-08 18 val_18 2000-04-09 2001-04-09 +18 val_18 2000-04-08 2001-04-08 18 val_18 2000-04-09 2001-04-09 +18 val_18 2000-04-08 2001-04-08 18 val_18 2000-04-09 2001-04-09 +18 val_18 2000-04-08 2001-04-08 18 val_18 2000-04-09 2001-04-09 +19 val_19 2000-04-08 2001-04-08 19 val_19 2000-04-09 2001-04-09 +Warning: Shuffle Join JOIN[8][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product +PREHOOK: query: select * +from pcr_t1 t1 join pcr_t1 t2 +where (t1.ds1='2000-04-08' and t2.key=1) or (t1.ds1='2000-04-09' and t2.key=2) +order by t2.key, t2.value, t1.ds1 +PREHOOK: type: QUERY +PREHOOK: Input: default@pcr_t1 +PREHOOK: Input: default@pcr_t1@ds1=2000-04-08/ds2=2001-04-08 +PREHOOK: Input: default@pcr_t1@ds1=2000-04-09/ds2=2001-04-09 +PREHOOK: Input: default@pcr_t1@ds1=2000-04-10/ds2=2001-04-10 +#### A masked pattern was here #### +POSTHOOK: query: select * +from pcr_t1 t1 join pcr_t1 t2 +where (t1.ds1='2000-04-08' and t2.key=1) or (t1.ds1='2000-04-09' and t2.key=2) +order by t2.key, t2.value, t1.ds1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@pcr_t1 +POSTHOOK: Input: default@pcr_t1@ds1=2000-04-08/ds2=2001-04-08 +POSTHOOK: Input: default@pcr_t1@ds1=2000-04-09/ds2=2001-04-09 +POSTHOOK: Input: default@pcr_t1@ds1=2000-04-10/ds2=2001-04-10 +#### A masked pattern was here #### +0 val_0 
2000-04-09 2001-04-09 2 val_2 2000-04-10 2001-04-10 +0 val_0 2000-04-09 2001-04-09 2 val_2 2000-04-08 2001-04-08 +0 val_0 2000-04-09 2001-04-09 2 val_2 2000-04-09 2001-04-09 +0 val_0 2000-04-09 2001-04-09 2 val_2 2000-04-10 2001-04-10 +0 val_0 2000-04-09 2001-04-09 2 val_2 2000-04-08 2001-04-08 +0 val_0 2000-04-09 2001-04-09 2 val_2 2000-04-09 2001-04-09 +0 val_0 2000-04-09 2001-04-09 2 val_2 2000-04-10 2001-04-10 +0 val_0 2000-04-09 2001-04-09 2 val_2 2000-04-08 2001-04-08 +0 val_0 2000-04-09 2001-04-09 2 val_2 2000-04-09 2001-04-09 +10 val_10 2000-04-09 2001-04-09 2 val_2 2000-04-10 2001-04-10 +10 val_10 2000-04-09 2001-04-09 2 val_2 2000-04-08 2001-04-08 +10 val_10 2000-04-09 2001-04-09 2 val_2 2000-04-09 2001-04-09 +11 val_11 2000-04-09 2001-04-09 2 val_2 2000-04-10 2001-04-10 +11 val_11 2000-04-09 2001-04-09 2 val_2 2000-04-08 2001-04-08 +11 val_11 2000-04-09 2001-04-09 2 val_2 2000-04-09 2001-04-09 +12 val_12 2000-04-09 2001-04-09 2 val_2 2000-04-10 2001-04-10 +12 val_12 2000-04-09 2001-04-09 2 val_2 2000-04-08 2001-04-08 +12 val_12 2000-04-09 2001-04-09 2 val_2 2000-04-09 2001-04-09 +12 val_12 2000-04-09 2001-04-09 2 val_2 2000-04-10 2001-04-10 +12 val_12 2000-04-09 2001-04-09 2 val_2 2000-04-08 2001-04-08 +12 val_12 2000-04-09 2001-04-09 2 val_2 2000-04-09 2001-04-09 +15 val_15 2000-04-09 2001-04-09 2 val_2 2000-04-10 2001-04-10 +15 val_15 2000-04-09 2001-04-09 2 val_2 2000-04-08 2001-04-08 +15 val_15 2000-04-09 2001-04-09 2 val_2 2000-04-09 2001-04-09 +15 val_15 2000-04-09 2001-04-09 2 val_2 2000-04-10 2001-04-10 +15 val_15 2000-04-09 2001-04-09 2 val_2 2000-04-08 2001-04-08 +15 val_15 2000-04-09 2001-04-09 2 val_2 2000-04-09 2001-04-09 +17 val_17 2000-04-09 2001-04-09 2 val_2 2000-04-10 2001-04-10 +17 val_17 2000-04-09 2001-04-09 2 val_2 2000-04-08 2001-04-08 +17 val_17 2000-04-09 2001-04-09 2 val_2 2000-04-09 2001-04-09 +18 val_18 2000-04-09 2001-04-09 2 val_2 2000-04-10 2001-04-10 +18 val_18 2000-04-09 2001-04-09 2 val_2 2000-04-08 2001-04-08 +18 val_18 2000-04-09 2001-04-09 2 val_2 2000-04-09 2001-04-09 +18 val_18 2000-04-09 2001-04-09 2 val_2 2000-04-10 2001-04-10 +18 val_18 2000-04-09 2001-04-09 2 val_2 2000-04-08 2001-04-08 +18 val_18 2000-04-09 2001-04-09 2 val_2 2000-04-09 2001-04-09 +19 val_19 2000-04-09 2001-04-09 2 val_2 2000-04-10 2001-04-10 +19 val_19 2000-04-09 2001-04-09 2 val_2 2000-04-08 2001-04-08 +19 val_19 2000-04-09 2001-04-09 2 val_2 2000-04-09 2001-04-09 +2 val_2 2000-04-09 2001-04-09 2 val_2 2000-04-10 2001-04-10 +2 val_2 2000-04-09 2001-04-09 2 val_2 2000-04-08 2001-04-08 +2 val_2 2000-04-09 2001-04-09 2 val_2 2000-04-09 2001-04-09 +4 val_4 2000-04-09 2001-04-09 2 val_2 2000-04-10 2001-04-10 +4 val_4 2000-04-09 2001-04-09 2 val_2 2000-04-08 2001-04-08 +4 val_4 2000-04-09 2001-04-09 2 val_2 2000-04-09 2001-04-09 +5 val_5 2000-04-09 2001-04-09 2 val_2 2000-04-10 2001-04-10 +5 val_5 2000-04-09 2001-04-09 2 val_2 2000-04-08 2001-04-08 +5 val_5 2000-04-09 2001-04-09 2 val_2 2000-04-09 2001-04-09 +5 val_5 2000-04-09 2001-04-09 2 val_2 2000-04-10 2001-04-10 +5 val_5 2000-04-09 2001-04-09 2 val_2 2000-04-08 2001-04-08 +5 val_5 2000-04-09 2001-04-09 2 val_2 2000-04-09 2001-04-09 +5 val_5 2000-04-09 2001-04-09 2 val_2 2000-04-10 2001-04-10 +5 val_5 2000-04-09 2001-04-09 2 val_2 2000-04-08 2001-04-08 +5 val_5 2000-04-09 2001-04-09 2 val_2 2000-04-09 2001-04-09 +8 val_8 2000-04-09 2001-04-09 2 val_2 2000-04-10 2001-04-10 +8 val_8 2000-04-09 2001-04-09 2 val_2 2000-04-08 2001-04-08 +8 val_8 2000-04-09 2001-04-09 2 val_2 2000-04-09 2001-04-09 +9 val_9 2000-04-09 
2001-04-09 2 val_2 2000-04-10 2001-04-10 +9 val_9 2000-04-09 2001-04-09 2 val_2 2000-04-08 2001-04-08 +9 val_9 2000-04-09 2001-04-09 2 val_2 2000-04-09 2001-04-09 PREHOOK: query: drop table pcr_t1 PREHOOK: type: DROPTABLE PREHOOK: Input: default@pcr_t1 diff --git ql/src/test/results/clientpositive/pointlookup4.q.out ql/src/test/results/clientpositive/pointlookup4.q.out index 3c9cc609038a3af300985acb8c114f083dadefea..a9eb7133c5db6a09fcb671439bb73258389449ed 100644 --- ql/src/test/results/clientpositive/pointlookup4.q.out +++ ql/src/test/results/clientpositive/pointlookup4.q.out @@ -216,6 +216,25 @@ STAGE PLANS: Processor Tree: ListSink +PREHOOK: query: select key, value, ds1, ds2 +from pcr_t1 +where (ds1='2000-04-08' and ds2='2001-04-08' and key=1) or (ds1='2000-04-09' and ds2='2001-04-09' and key=2) +order by key, value, ds1, ds2 +PREHOOK: type: QUERY +PREHOOK: Input: default@pcr_t1 +PREHOOK: Input: default@pcr_t1@ds1=2000-04-08/ds2=2001-04-08 +PREHOOK: Input: default@pcr_t1@ds1=2000-04-09/ds2=2001-04-09 +#### A masked pattern was here #### +POSTHOOK: query: select key, value, ds1, ds2 +from pcr_t1 +where (ds1='2000-04-08' and ds2='2001-04-08' and key=1) or (ds1='2000-04-09' and ds2='2001-04-09' and key=2) +order by key, value, ds1, ds2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@pcr_t1 +POSTHOOK: Input: default@pcr_t1@ds1=2000-04-08/ds2=2001-04-08 +POSTHOOK: Input: default@pcr_t1@ds1=2000-04-09/ds2=2001-04-09 +#### A masked pattern was here #### +2 val_2 2000-04-09 2001-04-09 PREHOOK: query: explain extended select key, value, ds1, ds2 from pcr_t1 @@ -392,6 +411,25 @@ STAGE PLANS: Processor Tree: ListSink +PREHOOK: query: select key, value, ds1, ds2 +from pcr_t1 +where (ds1='2000-04-08' and ds2='2001-04-08' and key=1) or (ds1='2000-04-09' and ds2='2001-04-09' and key=2) +order by key, value, ds1, ds2 +PREHOOK: type: QUERY +PREHOOK: Input: default@pcr_t1 +PREHOOK: Input: default@pcr_t1@ds1=2000-04-08/ds2=2001-04-08 +PREHOOK: Input: default@pcr_t1@ds1=2000-04-09/ds2=2001-04-09 +#### A masked pattern was here #### +POSTHOOK: query: select key, value, ds1, ds2 +from pcr_t1 +where (ds1='2000-04-08' and ds2='2001-04-08' and key=1) or (ds1='2000-04-09' and ds2='2001-04-09' and key=2) +order by key, value, ds1, ds2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@pcr_t1 +POSTHOOK: Input: default@pcr_t1@ds1=2000-04-08/ds2=2001-04-08 +POSTHOOK: Input: default@pcr_t1@ds1=2000-04-09/ds2=2001-04-09 +#### A masked pattern was here #### +2 val_2 2000-04-09 2001-04-09 PREHOOK: query: drop table pcr_t1 PREHOOK: type: DROPTABLE PREHOOK: Input: default@pcr_t1
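Note (editorial, not part of the generated .q.out output): the self-join queries exercised above come in two shapes. When the join carries no equality predicate in an ON clause, the planner emits the "Shuffle Join ... is a cross product" warning and applies the disjunctive (ds1, key) filter only after materializing the full 40x60-row product (visible as the 2400-row Join Operator estimate in the plan). When the join has t1.key=t2.key in the ON clause, the shuffle gets a join key and the constant partition predicates prune the scan to the matching ds1 partitions. A minimal sketch of the two shapes, copied from the queries in this test against the same pcr_t1 table (partitioned by ds1/ds2):

-- Cross product: no join key, so every t1 row pairs with every t2 row
-- and the (ds1, key) disjunction is filtered after the join.
select *
from pcr_t1 t1 join pcr_t1 t2
where (t1.ds1='2000-04-08' and t2.key=1) or (t1.ds1='2000-04-09' and t2.key=2)
order by t2.key, t2.value, t1.ds1;

-- Equi-join: t1.key=t2.key gives the shuffle a key, there is no
-- cross-product warning, and only the two matching ds1 partitions are read.
select *
from pcr_t1 t1 join pcr_t1 t2
on t1.key=t2.key and t1.ds1='2000-04-08' and t2.ds1='2000-04-09'
order by t2.key, t2.value, t1.ds1;

The hunks above also replace the old "order by t1.key" with "order by t2.key, t2.value, t1.ds1" throughout, presumably so that the row order of the golden output is stable enough for the result rows now being compared.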