diff --git a/itests/src/test/resources/testconfiguration.properties b/itests/src/test/resources/testconfiguration.properties
index ac249ed..6a9e729 100644
--- a/itests/src/test/resources/testconfiguration.properties
+++ b/itests/src/test/resources/testconfiguration.properties
@@ -231,6 +231,7 @@ minitez.query.files.shared=acid_globallimit.q,\
   stats_only_null.q,\
   subquery_exists.q,\
   subquery_in.q,\
+  tez_join_hash.q,\
   temp_table.q,\
   transform1.q,\
   transform2.q,\
@@ -451,7 +452,6 @@ minitez.query.files=acid_vectorization_missing_cols.q,\
   tez_dynpart_hashjoin_3.q,\
   tez_vector_dynpart_hashjoin_1.q,\
   tez_vector_dynpart_hashjoin_2.q,\
-  tez_join_hash.q,\
   tez_join_result_complex.q,\
   tez_join_tests.q,\
   tez_joins_explain.q,\
diff --git a/ql/src/test/queries/clientpositive/tez_join_hash.q b/ql/src/test/queries/clientpositive/tez_join_hash.q
index f2808b8..158c99b 100644
--- a/ql/src/test/queries/clientpositive/tez_join_hash.q
+++ b/ql/src/test/queries/clientpositive/tez_join_hash.q
@@ -5,7 +5,6 @@ set hive.explain.user=false;
 create table orc_src (key string, value string) STORED AS ORC;
 insert into table orc_src select * from src;
 
-set hive.execution.engine=tez;
 set hive.vectorized.execution.enabled=true;
 set hive.auto.convert.join.noconditionaltask.size=1;
 set hive.exec.reducers.bytes.per.reducer=20000;
@@ -22,7 +21,7 @@ set hive.mapjoin.hybridgrace.minwbsize=350;
 set hive.mapjoin.hybridgrace.minnumpartitions=8;
 
 explain
-select count(*) from (select x.key as key, y.value as value from
+select key, count(*) from (select x.key as key, y.value as value from
 srcpart x join srcpart y on (x.key = y.key)
 union all
 select key, value from srcpart z) a join src b on (a.value = b.value) group by a.key, a.value;
@@ -31,11 +30,3 @@ select key, count(*) from (select x.key as key, y.value as value from
 srcpart x join srcpart y on (x.key = y.key)
 union all
 select key, value from srcpart z) a join src b on (a.value = b.value) group by a.key, a.value;
-
-set hive.execution.engine=mr;
-select key, count(*) from (select x.key as key, y.value as value from
-srcpart x join srcpart y on (x.key = y.key)
-union all
-select key, value from srcpart z) a join src b on (a.value = b.value) group by a.key, a.value;
-
-
diff --git a/ql/src/test/results/clientpositive/llap/tez_join_hash.q.out b/ql/src/test/results/clientpositive/llap/tez_join_hash.q.out
index 26d12e5..f14d9b0 100644
--- a/ql/src/test/results/clientpositive/llap/tez_join_hash.q.out
+++ b/ql/src/test/results/clientpositive/llap/tez_join_hash.q.out
@@ -130,13 +130,13 @@ POSTHOOK: Input: default@src
 #### A masked pattern was here ####
 1028
 PREHOOK: query: explain
-select count(*) from (select x.key as key, y.value as value from
+select key, count(*) from (select x.key as key, y.value as value from
 srcpart x join srcpart y on (x.key = y.key)
 union all
 select key, value from srcpart z) a join src b on (a.value = b.value) group by a.key, a.value
 PREHOOK: type: QUERY
 POSTHOOK: query: explain
-select count(*) from (select x.key as key, y.value as value from
+select key, count(*) from (select x.key as key, y.value as value from
 srcpart x join srcpart y on (x.key = y.key)
 union all
 select key, value from srcpart z) a join src b on (a.value = b.value) group by a.key, a.value
@@ -288,8 +288,8 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1, _col2
                 Statistics: Num rows: 2310 Data size: 24541 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: _col2 (type: bigint)
-                  outputColumnNames: _col0
+                  expressions: _col0 (type: string), _col2 (type: bigint)
+
outputColumnNames: _col0, _col1 Statistics: Num rows: 2310 Data size: 24541 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false @@ -640,336 +640,3 @@ POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=12 96 20 97 144 98 144 -PREHOOK: query: select key, count(*) from (select x.key as key, y.value as value from -srcpart x join srcpart y on (x.key = y.key) -union all -select key, value from srcpart z) a join src b on (a.value = b.value) group by a.key, a.value -PREHOOK: type: QUERY -PREHOOK: Input: default@src -PREHOOK: Input: default@srcpart -PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 -PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12 -PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=11 -PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=12 -#### A masked pattern was here #### -POSTHOOK: query: select key, count(*) from (select x.key as key, y.value as value from -srcpart x join srcpart y on (x.key = y.key) -union all -select key, value from srcpart z) a join src b on (a.value = b.value) group by a.key, a.value -POSTHOOK: type: QUERY -POSTHOOK: Input: default@src -POSTHOOK: Input: default@srcpart -POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 -POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12 -POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=11 -POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=12 -#### A masked pattern was here #### -0 468 -10 20 -100 144 -103 144 -104 144 -105 20 -11 20 -111 20 -113 144 -114 20 -116 20 -118 144 -119 468 -12 144 -120 144 -125 144 -126 20 -128 468 -129 144 -131 20 -133 20 -134 144 -136 20 -137 144 -138 1088 -143 20 -145 20 -146 144 -149 144 -15 144 -150 20 -152 144 -153 20 -155 20 -156 20 -157 20 -158 20 -160 20 -162 20 -163 20 -164 144 -165 144 -166 20 -167 468 -168 20 -169 1088 -17 20 -170 20 -172 144 -174 144 -175 144 -176 144 -177 20 -178 20 -179 144 -18 144 -180 20 -181 20 -183 20 -186 20 -187 468 -189 20 -19 20 -190 20 -191 144 -192 20 -193 468 -194 20 -195 144 -196 20 -197 144 -199 468 -2 20 -20 20 -200 144 -201 20 -202 20 -203 144 -205 144 -207 144 -208 468 -209 144 -213 144 -214 20 -216 144 -217 144 -218 20 -219 144 -221 144 -222 20 -223 144 -224 144 -226 20 -228 20 -229 144 -230 2100 -233 144 -235 20 -237 144 -238 144 -239 144 -24 144 -241 20 -242 144 -244 20 -247 20 -248 20 -249 20 -252 20 -255 144 -256 144 -257 20 -258 20 -26 144 -260 20 -262 20 -263 20 -265 144 -266 20 -27 20 -272 144 -273 468 -274 20 -275 20 -277 1088 -278 144 -28 20 -280 144 -281 144 -282 144 -283 20 -284 20 -285 20 -286 20 -287 20 -288 144 -289 20 -291 20 -292 20 -296 20 -298 468 -30 20 -302 20 -305 20 -306 20 -307 144 -308 20 -309 144 -310 20 -311 468 -315 20 -316 468 -317 144 -318 468 -321 144 -322 144 -323 20 -325 144 -327 468 -33 20 -331 144 -332 20 -333 144 -335 20 -336 20 -338 20 -339 20 -34 20 -341 20 -342 144 -344 144 -345 20 -348 2100 -35 468 -351 20 -353 144 -356 20 -360 20 -362 20 -364 20 -365 20 -366 20 -367 144 -368 20 -369 468 -37 144 -373 20 -374 20 -375 20 -377 20 -378 20 -379 20 -382 144 -384 468 -386 20 -389 20 -392 20 -393 20 -394 20 -395 144 -396 468 -397 144 -399 144 -4 20 -400 20 -401 2100 -402 20 -403 468 -404 144 -406 1088 -407 20 -409 468 -41 20 -411 20 -413 144 -414 144 -417 468 -418 20 -419 20 -42 144 -421 20 -424 144 -427 20 -429 144 -43 20 -430 468 -431 468 -432 20 -435 20 -436 20 -437 20 -438 468 -439 144 -44 20 -443 20 -444 20 -446 20 -448 20 -449 20 -452 20 -453 20 -454 468 -455 20 -457 20 -458 144 -459 144 -460 20 -462 144 -463 144 -466 468 -467 20 -468 1088 -469 2100 -47 20 -470 20 -472 20 -475 
20 -477 20 -478 144 -479 20 -480 468 -481 20 -482 20 -483 20 -484 20 -485 20 -487 20 -489 1088 -490 20 -491 20 -492 144 -493 20 -494 20 -495 20 -496 20 -497 20 -498 468 -5 468 -51 144 -53 20 -54 20 -57 20 -58 144 -64 20 -65 20 -66 20 -67 144 -69 20 -70 468 -72 144 -74 20 -76 144 -77 20 -78 20 -8 20 -80 20 -82 20 -83 144 -84 144 -85 20 -86 20 -87 20 -9 20 -90 468 -92 20 -95 144 -96 20 -97 144 -98 144 diff --git a/ql/src/test/results/clientpositive/tez/tez_join_hash.q.out b/ql/src/test/results/clientpositive/tez/tez_join_hash.q.out index 02678fd..347c48c 100644 --- a/ql/src/test/results/clientpositive/tez/tez_join_hash.q.out +++ b/ql/src/test/results/clientpositive/tez/tez_join_hash.q.out @@ -126,13 +126,13 @@ POSTHOOK: Input: default@src #### A masked pattern was here #### 1028 PREHOOK: query: explain -select count(*) from (select x.key as key, y.value as value from +select key, count(*) from (select x.key as key, y.value as value from srcpart x join srcpart y on (x.key = y.key) union all select key, value from srcpart z) a join src b on (a.value = b.value) group by a.key, a.value PREHOOK: type: QUERY POSTHOOK: query: explain -select count(*) from (select x.key as key, y.value as value from +select key, count(*) from (select x.key as key, y.value as value from srcpart x join srcpart y on (x.key = y.key) union all select key, value from srcpart z) a join src b on (a.value = b.value) group by a.key, a.value @@ -155,55 +155,55 @@ STAGE PLANS: Map 1 Map Operator Tree: TableScan - alias: y - Statistics: Num rows: 2000 Data size: 356000 Basic stats: COMPLETE Column stats: COMPLETE + alias: x + Statistics: Num rows: 2000 Data size: 37248 Basic stats: COMPLETE Column stats: NONE Filter Operator - predicate: (key is not null and value is not null) (type: boolean) - Statistics: Num rows: 2000 Data size: 356000 Basic stats: COMPLETE Column stats: COMPLETE + predicate: key is not null (type: boolean) + Statistics: Num rows: 2000 Data size: 37248 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), value (type: string) - outputColumnNames: _col0, _col1 - Statistics: Num rows: 2000 Data size: 356000 Basic stats: COMPLETE Column stats: COMPLETE + expressions: key (type: string) + outputColumnNames: _col0 + Statistics: Num rows: 2000 Data size: 37248 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 2000 Data size: 356000 Basic stats: COMPLETE Column stats: COMPLETE - value expressions: _col1 (type: string) + Statistics: Num rows: 2000 Data size: 37248 Basic stats: COMPLETE Column stats: NONE Map 6 Map Operator Tree: TableScan - alias: x - Statistics: Num rows: 2000 Data size: 174000 Basic stats: COMPLETE Column stats: COMPLETE + alias: y + Statistics: Num rows: 2000 Data size: 37248 Basic stats: COMPLETE Column stats: NONE Filter Operator - predicate: key is not null (type: boolean) - Statistics: Num rows: 2000 Data size: 174000 Basic stats: COMPLETE Column stats: COMPLETE + predicate: (key is not null and value is not null) (type: boolean) + Statistics: Num rows: 2000 Data size: 37248 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string) - outputColumnNames: _col0 - Statistics: Num rows: 2000 Data size: 174000 Basic stats: COMPLETE Column stats: COMPLETE + expressions: key (type: string), value (type: string) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 2000 Data size: 37248 
Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 2000 Data size: 174000 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 2000 Data size: 37248 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: string) Map 7 Map Operator Tree: TableScan alias: z - Statistics: Num rows: 2000 Data size: 356000 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 2000 Data size: 37248 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: value is not null (type: boolean) - Statistics: Num rows: 2000 Data size: 356000 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 2000 Data size: 37248 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 2000 Data size: 356000 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 2000 Data size: 37248 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col1 (type: string) sort order: + Map-reduce partition columns: _col1 (type: string) - Statistics: Num rows: 21512 Data size: 3829136 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 4200 Data size: 78220 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string) Map 8 Map Operator Tree: @@ -230,17 +230,17 @@ STAGE PLANS: keys: 0 _col0 (type: string) 1 _col0 (type: string) - outputColumnNames: _col1, _col2 - Statistics: Num rows: 19512 Data size: 3473136 Basic stats: COMPLETE Column stats: COMPLETE + outputColumnNames: _col0, _col2 + Statistics: Num rows: 2200 Data size: 40972 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: _col2 (type: string), _col1 (type: string) + expressions: _col0 (type: string), _col2 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 19512 Data size: 3473136 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 2200 Data size: 40972 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col1 (type: string) sort order: + Map-reduce partition columns: _col1 (type: string) - Statistics: Num rows: 21512 Data size: 3829136 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 4200 Data size: 78220 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string) Reducer 4 Reduce Operator Tree: @@ -251,18 +251,18 @@ STAGE PLANS: 0 _col1 (type: string) 1 _col0 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 50261 Data size: 8946458 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 4620 Data size: 86042 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count() keys: _col0 (type: string), _col1 (type: string) mode: hash outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 25130 Data size: 4674180 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 4620 Data size: 86042 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col1 (type: string) sort order: ++ Map-reduce partition columns: _col0 (type: string), _col1 (type: string) - Statistics: Num rows: 25130 Data size: 4674180 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 4620 Data size: 86042 Basic stats: COMPLETE Column stats: NONE value expressions: _col2 
(type: bigint) Reducer 5 Execution mode: vectorized @@ -272,14 +272,14 @@ STAGE PLANS: keys: KEY._col0 (type: string), KEY._col1 (type: string) mode: mergepartial outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 25130 Data size: 4674180 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 2310 Data size: 43021 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: _col2 (type: bigint) - outputColumnNames: _col0 - Statistics: Num rows: 25130 Data size: 201040 Basic stats: COMPLETE Column stats: COMPLETE + expressions: _col0 (type: string), _col2 (type: bigint) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 2310 Data size: 43021 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 25130 Data size: 201040 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 2310 Data size: 43021 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -626,336 +626,3 @@ POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=12 96 20 97 144 98 144 -PREHOOK: query: select key, count(*) from (select x.key as key, y.value as value from -srcpart x join srcpart y on (x.key = y.key) -union all -select key, value from srcpart z) a join src b on (a.value = b.value) group by a.key, a.value -PREHOOK: type: QUERY -PREHOOK: Input: default@src -PREHOOK: Input: default@srcpart -PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 -PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12 -PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=11 -PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=12 -#### A masked pattern was here #### -POSTHOOK: query: select key, count(*) from (select x.key as key, y.value as value from -srcpart x join srcpart y on (x.key = y.key) -union all -select key, value from srcpart z) a join src b on (a.value = b.value) group by a.key, a.value -POSTHOOK: type: QUERY -POSTHOOK: Input: default@src -POSTHOOK: Input: default@srcpart -POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 -POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12 -POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=11 -POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=12 -#### A masked pattern was here #### -0 468 -10 20 -100 144 -103 144 -104 144 -105 20 -11 20 -111 20 -113 144 -114 20 -116 20 -118 144 -119 468 -12 144 -120 144 -125 144 -126 20 -128 468 -129 144 -131 20 -133 20 -134 144 -136 20 -137 144 -138 1088 -143 20 -145 20 -146 144 -149 144 -15 144 -150 20 -152 144 -153 20 -155 20 -156 20 -157 20 -158 20 -160 20 -162 20 -163 20 -164 144 -165 144 -166 20 -167 468 -168 20 -169 1088 -17 20 -170 20 -172 144 -174 144 -175 144 -176 144 -177 20 -178 20 -179 144 -18 144 -180 20 -181 20 -183 20 -186 20 -187 468 -189 20 -19 20 -190 20 -191 144 -192 20 -193 468 -194 20 -195 144 -196 20 -197 144 -199 468 -2 20 -20 20 -200 144 -201 20 -202 20 -203 144 -205 144 -207 144 -208 468 -209 144 -213 144 -214 20 -216 144 -217 144 -218 20 -219 144 -221 144 -222 20 -223 144 -224 144 -226 20 -228 20 -229 144 -230 2100 -233 144 -235 20 -237 144 -238 144 -239 144 -24 144 -241 20 -242 144 -244 20 -247 20 -248 20 -249 20 -252 20 -255 144 -256 144 -257 20 -258 20 -26 144 -260 20 -262 20 -263 20 -265 144 -266 20 -27 20 -272 144 -273 468 -274 20 -275 20 -277 1088 -278 144 -28 20 -280 144 -281 144 -282 144 -283 20 -284 20 -285 20 -286 20 -287 20 -288 144 -289 20 -291 20 -292 20 -296 20 -298 468 -30 20 -302 20 -305 20 -306 
20 -307 144 -308 20 -309 144 -310 20 -311 468 -315 20 -316 468 -317 144 -318 468 -321 144 -322 144 -323 20 -325 144 -327 468 -33 20 -331 144 -332 20 -333 144 -335 20 -336 20 -338 20 -339 20 -34 20 -341 20 -342 144 -344 144 -345 20 -348 2100 -35 468 -351 20 -353 144 -356 20 -360 20 -362 20 -364 20 -365 20 -366 20 -367 144 -368 20 -369 468 -37 144 -373 20 -374 20 -375 20 -377 20 -378 20 -379 20 -382 144 -384 468 -386 20 -389 20 -392 20 -393 20 -394 20 -395 144 -396 468 -397 144 -399 144 -4 20 -400 20 -401 2100 -402 20 -403 468 -404 144 -406 1088 -407 20 -409 468 -41 20 -411 20 -413 144 -414 144 -417 468 -418 20 -419 20 -42 144 -421 20 -424 144 -427 20 -429 144 -43 20 -430 468 -431 468 -432 20 -435 20 -436 20 -437 20 -438 468 -439 144 -44 20 -443 20 -444 20 -446 20 -448 20 -449 20 -452 20 -453 20 -454 468 -455 20 -457 20 -458 144 -459 144 -460 20 -462 144 -463 144 -466 468 -467 20 -468 1088 -469 2100 -47 20 -470 20 -472 20 -475 20 -477 20 -478 144 -479 20 -480 468 -481 20 -482 20 -483 20 -484 20 -485 20 -487 20 -489 1088 -490 20 -491 20 -492 144 -493 20 -494 20 -495 20 -496 20 -497 20 -498 468 -5 468 -51 144 -53 20 -54 20 -57 20 -58 144 -64 20 -65 20 -66 20 -67 144 -69 20 -70 468 -72 144 -74 20 -76 144 -77 20 -78 20 -8 20 -80 20 -82 20 -83 144 -84 144 -85 20 -86 20 -87 20 -9 20 -90 468 -92 20 -95 144 -96 20 -97 144 -98 144 diff --git a/ql/src/test/results/clientpositive/tez_join_hash.q.out b/ql/src/test/results/clientpositive/tez_join_hash.q.out new file mode 100644 index 0000000..6b366af --- /dev/null +++ b/ql/src/test/results/clientpositive/tez_join_hash.q.out @@ -0,0 +1,920 @@ +PREHOOK: query: -- SORT_QUERY_RESULTS + +create table orc_src (key string, value string) STORED AS ORC +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@orc_src +POSTHOOK: query: -- SORT_QUERY_RESULTS + +create table orc_src (key string, value string) STORED AS ORC +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@orc_src +PREHOOK: query: insert into table orc_src select * from src +PREHOOK: type: QUERY +PREHOOK: Input: default@src +PREHOOK: Output: default@orc_src +POSTHOOK: query: insert into table orc_src select * from src +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +POSTHOOK: Output: default@orc_src +POSTHOOK: Lineage: orc_src.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: orc_src.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +PREHOOK: query: explain +SELECT count(*) FROM src, orc_src where src.key=orc_src.key +PREHOOK: type: QUERY +POSTHOOK: query: explain +SELECT count(*) FROM src, orc_src where src.key=orc_src.key +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-2 depends on stages: Stage-1 + Stage-0 depends on stages: Stage-2 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: src + Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: key is not null (type: boolean) + Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: string) + outputColumnNames: _col0 + Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column 
stats: NONE + TableScan + alias: orc_src + Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: key is not null (type: boolean) + Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: string) + outputColumnNames: _col0 + Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Reduce Operator Tree: + Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 _col0 (type: string) + 1 _col0 (type: string) + Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: count() + mode: hash + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + + Stage: Stage-2 + Map Reduce + Map Operator Tree: + TableScan + Reduce Output Operator + sort order: + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: bigint) + Reduce Operator Tree: + Group By Operator + aggregations: count(VALUE._col0) + mode: mergepartial + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT count(*) FROM src, orc_src where src.key=orc_src.key +PREHOOK: type: QUERY +PREHOOK: Input: default@orc_src +PREHOOK: Input: default@src +#### A masked pattern was here #### +POSTHOOK: query: SELECT count(*) FROM src, orc_src where src.key=orc_src.key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@orc_src +POSTHOOK: Input: default@src +#### A masked pattern was here #### +1028 +PREHOOK: query: explain +select key, count(*) from (select x.key as key, y.value as value from +srcpart x join srcpart y on (x.key = y.key) +union all +select key, value from srcpart z) a join src b on (a.value = b.value) group by a.key, a.value +PREHOOK: type: QUERY +POSTHOOK: query: explain +select key, count(*) from (select x.key as key, y.value as value from +srcpart x join srcpart y on (x.key = y.key) +union all +select key, value from srcpart z) a join src b on (a.value = b.value) group by a.key, a.value +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-12 is a root stage , consists of Stage-15, Stage-16, Stage-1 + Stage-15 has a backup stage: Stage-1 + Stage-10 depends on stages: Stage-15 + Stage-9 depends on stages: Stage-1, Stage-10, Stage-11 , consists of Stage-13, Stage-14, Stage-2 + Stage-13 has a backup stage: Stage-2 + Stage-7 depends on stages: Stage-13 + Stage-3 depends on stages: Stage-2, Stage-7, Stage-8 + Stage-14 has a backup stage: Stage-2 + Stage-8 depends on 
stages: Stage-14 + Stage-2 + Stage-16 has a backup stage: Stage-1 + Stage-11 depends on stages: Stage-16 + Stage-1 + Stage-0 depends on stages: Stage-3 + +STAGE PLANS: + Stage: Stage-12 + Conditional Operator + + Stage: Stage-15 + Map Reduce Local Work + Alias -> Map Local Tables: + $hdt$_0-subquery1:$hdt$_0-subquery1:$hdt$_1:x + Fetch Operator + limit: -1 + Alias -> Map Local Operator Tree: + $hdt$_0-subquery1:$hdt$_0-subquery1:$hdt$_1:x + TableScan + alias: x + Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: key is not null (type: boolean) + Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: string) + outputColumnNames: _col0 + Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE + HashTable Sink Operator + keys: + 0 _col0 (type: string) + 1 _col0 (type: string) + + Stage: Stage-10 + Map Reduce + Map Operator Tree: + TableScan + alias: y + Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: (key is not null and value is not null) (type: boolean) + Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: string), value (type: string) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE + Map Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 _col0 (type: string) + 1 _col0 (type: string) + outputColumnNames: _col1, _col2 + Statistics: Num rows: 2200 Data size: 23372 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: _col2 (type: string), _col1 (type: string) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 2200 Data size: 23372 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + Local Work: + Map Reduce Local Work + + Stage: Stage-9 + Conditional Operator + + Stage: Stage-13 + Map Reduce Local Work + Alias -> Map Local Tables: + $hdt$_1:b + Fetch Operator + limit: -1 + Alias -> Map Local Operator Tree: + $hdt$_1:b + TableScan + alias: b + Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: value is not null (type: boolean) + Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: value (type: string) + outputColumnNames: _col0 + Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + HashTable Sink Operator + keys: + 0 _col1 (type: string) + 1 _col0 (type: string) + + Stage: Stage-7 + Map Reduce + Map Operator Tree: + TableScan + Union + Statistics: Num rows: 4200 Data size: 44620 Basic stats: COMPLETE Column stats: NONE + Map Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 _col1 (type: string) + 1 _col0 (type: string) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 4620 Data size: 49082 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: count() + keys: _col0 (type: string), _col1 (type: string) + mode: hash + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 4620 Data size: 49082 Basic stats: COMPLETE Column stats: 
NONE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + TableScan + alias: z + Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: value is not null (type: boolean) + Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: string), value (type: string) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE + Union + Statistics: Num rows: 4200 Data size: 44620 Basic stats: COMPLETE Column stats: NONE + Map Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 _col1 (type: string) + 1 _col0 (type: string) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 4620 Data size: 49082 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: count() + keys: _col0 (type: string), _col1 (type: string) + mode: hash + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 4620 Data size: 49082 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + Local Work: + Map Reduce Local Work + + Stage: Stage-3 + Map Reduce + Map Operator Tree: + TableScan + Reduce Output Operator + key expressions: _col0 (type: string), _col1 (type: string) + sort order: ++ + Map-reduce partition columns: _col0 (type: string), _col1 (type: string) + Statistics: Num rows: 4620 Data size: 49082 Basic stats: COMPLETE Column stats: NONE + value expressions: _col2 (type: bigint) + Reduce Operator Tree: + Group By Operator + aggregations: count(VALUE._col0) + keys: KEY._col0 (type: string), KEY._col1 (type: string) + mode: mergepartial + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 2310 Data size: 24541 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: _col0 (type: string), _col2 (type: bigint) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 2310 Data size: 24541 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 2310 Data size: 24541 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-14 + Map Reduce Local Work + Alias -> Map Local Tables: + $hdt$_0-subquery2:$hdt$_0-subquery2:z + Fetch Operator + limit: -1 +#### A masked pattern was here #### + Fetch Operator + limit: -1 + Alias -> Map Local Operator Tree: + $hdt$_0-subquery2:$hdt$_0-subquery2:z + TableScan + alias: z + Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: value is not null (type: boolean) + Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: string), value (type: string) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE + Union + Statistics: Num rows: 
4200 Data size: 44620 Basic stats: COMPLETE Column stats: NONE + HashTable Sink Operator + keys: + 0 _col1 (type: string) + 1 _col0 (type: string) +#### A masked pattern was here #### + TableScan + Union + Statistics: Num rows: 4200 Data size: 44620 Basic stats: COMPLETE Column stats: NONE + HashTable Sink Operator + keys: + 0 _col1 (type: string) + 1 _col0 (type: string) + + Stage: Stage-8 + Map Reduce + Map Operator Tree: + TableScan + alias: b + Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: value is not null (type: boolean) + Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: value (type: string) + outputColumnNames: _col0 + Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Map Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 _col1 (type: string) + 1 _col0 (type: string) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 4620 Data size: 49082 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: count() + keys: _col0 (type: string), _col1 (type: string) + mode: hash + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 4620 Data size: 49082 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + Local Work: + Map Reduce Local Work + + Stage: Stage-2 + Map Reduce + Map Operator Tree: + TableScan + Union + Statistics: Num rows: 4200 Data size: 44620 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col1 (type: string) + sort order: + + Map-reduce partition columns: _col1 (type: string) + Statistics: Num rows: 4200 Data size: 44620 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: string) + TableScan + alias: z + Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: value is not null (type: boolean) + Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: string), value (type: string) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE + Union + Statistics: Num rows: 4200 Data size: 44620 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col1 (type: string) + sort order: + + Map-reduce partition columns: _col1 (type: string) + Statistics: Num rows: 4200 Data size: 44620 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: string) + TableScan + alias: b + Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: value is not null (type: boolean) + Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: value (type: string) + outputColumnNames: _col0 + Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Reduce Operator Tree: + Join Operator + condition 
map: + Inner Join 0 to 1 + keys: + 0 _col1 (type: string) + 1 _col0 (type: string) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 4620 Data size: 49082 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: count() + keys: _col0 (type: string), _col1 (type: string) + mode: hash + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 4620 Data size: 49082 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + + Stage: Stage-16 + Map Reduce Local Work + Alias -> Map Local Tables: + $hdt$_0-subquery1:$hdt$_0-subquery1:$hdt$_0:y + Fetch Operator + limit: -1 + Alias -> Map Local Operator Tree: + $hdt$_0-subquery1:$hdt$_0-subquery1:$hdt$_0:y + TableScan + alias: y + Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: (key is not null and value is not null) (type: boolean) + Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: string), value (type: string) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE + HashTable Sink Operator + keys: + 0 _col0 (type: string) + 1 _col0 (type: string) + + Stage: Stage-11 + Map Reduce + Map Operator Tree: + TableScan + alias: x + Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: key is not null (type: boolean) + Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: string) + outputColumnNames: _col0 + Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE + Map Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 _col0 (type: string) + 1 _col0 (type: string) + outputColumnNames: _col1, _col2 + Statistics: Num rows: 2200 Data size: 23372 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: _col2 (type: string), _col1 (type: string) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 2200 Data size: 23372 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + Local Work: + Map Reduce Local Work + + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: y + Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: (key is not null and value is not null) (type: boolean) + Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: string), value (type: string) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: string) + TableScan + alias: x + Statistics: Num rows: 2000 
Data size: 21248 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: key is not null (type: boolean) + Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: string) + outputColumnNames: _col0 + Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE + Reduce Operator Tree: + Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 _col0 (type: string) + 1 _col0 (type: string) + outputColumnNames: _col1, _col2 + Statistics: Num rows: 2200 Data size: 23372 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: _col2 (type: string), _col1 (type: string) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 2200 Data size: 23372 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select key, count(*) from (select x.key as key, y.value as value from +srcpart x join srcpart y on (x.key = y.key) +union all +select key, value from srcpart z) a join src b on (a.value = b.value) group by a.key, a.value +PREHOOK: type: QUERY +PREHOOK: Input: default@src +PREHOOK: Input: default@srcpart +PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 +PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12 +PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=11 +PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=12 +#### A masked pattern was here #### +POSTHOOK: query: select key, count(*) from (select x.key as key, y.value as value from +srcpart x join srcpart y on (x.key = y.key) +union all +select key, value from srcpart z) a join src b on (a.value = b.value) group by a.key, a.value +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +POSTHOOK: Input: default@srcpart +POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 +POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12 +POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=11 +POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=12 +#### A masked pattern was here #### +0 468 +10 20 +100 144 +103 144 +104 144 +105 20 +11 20 +111 20 +113 144 +114 20 +116 20 +118 144 +119 468 +12 144 +120 144 +125 144 +126 20 +128 468 +129 144 +131 20 +133 20 +134 144 +136 20 +137 144 +138 1088 +143 20 +145 20 +146 144 +149 144 +15 144 +150 20 +152 144 +153 20 +155 20 +156 20 +157 20 +158 20 +160 20 +162 20 +163 20 +164 144 +165 144 +166 20 +167 468 +168 20 +169 1088 +17 20 +170 20 +172 144 +174 144 +175 144 +176 144 +177 20 +178 20 +179 144 +18 144 +180 20 +181 20 +183 20 +186 20 +187 468 +189 20 +19 20 +190 20 +191 144 +192 20 +193 468 +194 20 +195 144 +196 20 +197 144 +199 468 +2 20 +20 20 +200 144 +201 20 +202 20 +203 144 +205 144 +207 144 +208 468 +209 144 +213 144 +214 20 +216 144 +217 144 +218 20 +219 144 +221 144 +222 20 +223 144 +224 144 +226 20 +228 20 +229 144 +230 2100 +233 144 +235 20 +237 144 +238 144 +239 144 +24 144 +241 20 +242 144 +244 20 +247 20 +248 20 +249 20 +252 20 +255 144 +256 144 +257 20 +258 20 +26 144 +260 20 +262 20 +263 20 +265 144 +266 20 +27 20 +272 144 
+273 468 +274 20 +275 20 +277 1088 +278 144 +28 20 +280 144 +281 144 +282 144 +283 20 +284 20 +285 20 +286 20 +287 20 +288 144 +289 20 +291 20 +292 20 +296 20 +298 468 +30 20 +302 20 +305 20 +306 20 +307 144 +308 20 +309 144 +310 20 +311 468 +315 20 +316 468 +317 144 +318 468 +321 144 +322 144 +323 20 +325 144 +327 468 +33 20 +331 144 +332 20 +333 144 +335 20 +336 20 +338 20 +339 20 +34 20 +341 20 +342 144 +344 144 +345 20 +348 2100 +35 468 +351 20 +353 144 +356 20 +360 20 +362 20 +364 20 +365 20 +366 20 +367 144 +368 20 +369 468 +37 144 +373 20 +374 20 +375 20 +377 20 +378 20 +379 20 +382 144 +384 468 +386 20 +389 20 +392 20 +393 20 +394 20 +395 144 +396 468 +397 144 +399 144 +4 20 +400 20 +401 2100 +402 20 +403 468 +404 144 +406 1088 +407 20 +409 468 +41 20 +411 20 +413 144 +414 144 +417 468 +418 20 +419 20 +42 144 +421 20 +424 144 +427 20 +429 144 +43 20 +430 468 +431 468 +432 20 +435 20 +436 20 +437 20 +438 468 +439 144 +44 20 +443 20 +444 20 +446 20 +448 20 +449 20 +452 20 +453 20 +454 468 +455 20 +457 20 +458 144 +459 144 +460 20 +462 144 +463 144 +466 468 +467 20 +468 1088 +469 2100 +47 20 +470 20 +472 20 +475 20 +477 20 +478 144 +479 20 +480 468 +481 20 +482 20 +483 20 +484 20 +485 20 +487 20 +489 1088 +490 20 +491 20 +492 144 +493 20 +494 20 +495 20 +496 20 +497 20 +498 468 +5 468 +51 144 +53 20 +54 20 +57 20 +58 144 +64 20 +65 20 +66 20 +67 144 +69 20 +70 468 +72 144 +74 20 +76 144 +77 20 +78 20 +8 20 +80 20 +82 20 +83 144 +84 144 +85 20 +86 20 +87 20 +9 20 +90 468 +92 20 +95 144 +96 20 +97 144 +98 144
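
For reference, a minimal sketch of how the query portion of ql/src/test/queries/clientpositive/tez_join_hash.q reads once the hunks above are applied. It is reassembled from the diff, not the verbatim file: settings the hunks only show as context (for example the hive.mapjoin.hybridgrace knobs) are elided, and the ordering of the header lines is assumed. The substance of the change is visible here: the explicit hive.execution.engine switches are gone (which is presumably why the test moves from minitez.query.files to the shared list and gains a plain MR golden file under ql/src/test/results/clientpositive/), and both the EXPLAIN and the executed query now project key alongside count(*).

-- SORT_QUERY_RESULTS
set hive.explain.user=false;

create table orc_src (key string, value string) STORED AS ORC;
insert into table orc_src select * from src;

set hive.vectorized.execution.enabled=true;
set hive.auto.convert.join.noconditionaltask.size=1;
set hive.exec.reducers.bytes.per.reducer=20000;

explain
select key, count(*) from (select x.key as key, y.value as value from
srcpart x join srcpart y on (x.key = y.key)
union all
select key, value from srcpart z) a join src b on (a.value = b.value) group by a.key, a.value;

select key, count(*) from (select x.key as key, y.value as value from
srcpart x join srcpart y on (x.key = y.key)
union all
select key, value from srcpart z) a join src b on (a.value = b.value) group by a.key, a.value;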