diff --git a/data/files/parquet_complex_types.txt b/data/files/parquet_complex_types.txt new file mode 100644 index 0000000000..077c91343a --- /dev/null +++ b/data/files/parquet_complex_types.txt @@ -0,0 +1,1025 @@ +1|k1:v1,k2:v2|100,101|1,abc1|0 +2|k3:v3,k4:v4|102,103|2,abc2|1 +3|k5:v5,k6:v6|104,105|3,abc3|0 +4|k7:v7,k8:v8|106,107|4,abc4|1 +5|k9:v9,k10:v10|108,109|5,abc5|0 +6|k11:v11,k12:v12|110,111|6,abc6|1 +7|k13:v13,k14:v14|112,113|7,abc7|0 +8|k15:v15,k16:v16|114,115|8,abc8|1 +9|k17:v17,k18:v18|116,117|9,abc9|0 +10|k19:v19,k20:v20|118,119|10,abc10|1 +11|k21:v21,k22:v22|120,121|11,abc11|0 +12|k23:v23,k24:v24|122,123|12,abc12|1 +13|k25:v25,k26:v26|124,125|13,abc13|0 +14|k27:v27,k28:v28|126,127|14,abc14|1 +15|k29:v29,k30:v30|128,129|15,abc15|0 +16|k31:v31,k32:v32|130,131|16,abc16|1 +17|k33:v33,k34:v34|132,133|17,abc17|0 +18|k35:v35,k36:v36|134,135|18,abc18|1 +19|k37:v37,k38:v38|136,137|19,abc19|0 +20|k39:v39,k40:v40|138,139|20,abc20|1 +21|k41:v41,k42:v42|140,141|21,abc21|0 +22|k43:v43,k44:v44|142,143|22,abc22|1 +23|k45:v45,k46:v46|144,145|23,abc23|0 +24|k47:v47,k48:v48|146,147|24,abc24|1 +25|k49:v49,k50:v50|148,149|25,abc25|0 +26|k51:v51,k52:v52|150,151|26,abc26|1 +27|k53:v53,k54:v54|152,153|27,abc27|0 +28|k55:v55,k56:v56|154,155|28,abc28|1 +29|k57:v57,k58:v58|156,157|29,abc29|0 +30|k59:v59,k60:v60|158,159|30,abc30|1 +31|k61:v61,k62:v62|160,161|31,abc31|0 +32|k63:v63,k64:v64|162,163|32,abc32|1 +33|k65:v65,k66:v66|164,165|33,abc33|0 +34|k67:v67,k68:v68|166,167|34,abc34|1 +35|k69:v69,k70:v70|168,169|35,abc35|0 +36|k71:v71,k72:v72|170,171|36,abc36|1 +37|k73:v73,k74:v74|172,173|37,abc37|0 +38|k75:v75,k76:v76|174,175|38,abc38|1 +39|k77:v77,k78:v78|176,177|39,abc39|0 +40|k79:v79,k80:v80|178,179|40,abc40|1 +41|k81:v81,k82:v82|180,181|41,abc41|0 +42|k83:v83,k84:v84|182,183|42,abc42|1 +43|k85:v85,k86:v86|184,185|43,abc43|0 +44|k87:v87,k88:v88|186,187|44,abc44|1 +45|k89:v89,k90:v90|188,189|45,abc45|0 +46|k91:v91,k92:v92|190,191|46,abc46|1 +47|k93:v93,k94:v94|192,193|47,abc47|0 +48|k95:v95,k96:v96|194,195|48,abc48|1 +49|k97:v97,k98:v98|196,197|49,abc49|0 +50|k99:v99,k100:v100|198,199|50,abc50|1 +51|k101:v101,k102:v102|200,201|51,abc51|0 +52|k103:v103,k104:v104|202,203|52,abc52|1 +53|k105:v105,k106:v106|204,205|53,abc53|0 +54|k107:v107,k108:v108|206,207|54,abc54|1 +55|k109:v109,k110:v110|208,209|55,abc55|0 +56|k111:v111,k112:v112|210,211|56,abc56|1 +57|k113:v113,k114:v114|212,213|57,abc57|0 +58|k115:v115,k116:v116|214,215|58,abc58|1 +59|k117:v117,k118:v118|216,217|59,abc59|0 +60|k119:v119,k120:v120|218,219|60,abc60|1 +61|k121:v121,k122:v122|220,221|61,abc61|0 +62|k123:v123,k124:v124|222,223|62,abc62|1 +63|k125:v125,k126:v126|224,225|63,abc63|0 +64|k127:v127,k128:v128|226,227|64,abc64|1 +65|k129:v129,k130:v130|228,229|65,abc65|0 +66|k131:v131,k132:v132|230,231|66,abc66|1 +67|k133:v133,k134:v134|232,233|67,abc67|0 +68|k135:v135,k136:v136|234,235|68,abc68|1 +69|k137:v137,k138:v138|236,237|69,abc69|0 +70|k139:v139,k140:v140|238,239|70,abc70|1 +71|k141:v141,k142:v142|240,241|71,abc71|0 +72|k143:v143,k144:v144|242,243|72,abc72|1 +73|k145:v145,k146:v146|244,245|73,abc73|0 +74|k147:v147,k148:v148|246,247|74,abc74|1 +75|k149:v149,k150:v150|248,249|75,abc75|0 +76|k151:v151,k152:v152|250,251|76,abc76|1 +77|k153:v153,k154:v154|252,253|77,abc77|0 +78|k155:v155,k156:v156|254,255|78,abc78|1 +79|k157:v157,k158:v158|256,257|79,abc79|0 +80|k159:v159,k160:v160|258,259|80,abc80|1 +81|k161:v161,k162:v162|260,261|81,abc81|0 +82|k163:v163,k164:v164|262,263|82,abc82|1 +83|k165:v165,k166:v166|264,265|83,abc83|0 
+84|k167:v167,k168:v168|266,267|84,abc84|1 +85|k169:v169,k170:v170|268,269|85,abc85|0 +86|k171:v171,k172:v172|270,271|86,abc86|1 +87|k173:v173,k174:v174|272,273|87,abc87|0 +88|k175:v175,k176:v176|274,275|88,abc88|1 +89|k177:v177,k178:v178|276,277|89,abc89|0 +90|k179:v179,k180:v180|278,279|90,abc90|1 +91|k181:v181,k182:v182|280,281|91,abc91|0 +92|k183:v183,k184:v184|282,283|92,abc92|1 +93|k185:v185,k186:v186|284,285|93,abc93|0 +94|k187:v187,k188:v188|286,287|94,abc94|1 +95|k189:v189,k190:v190|288,289|95,abc95|0 +96|k191:v191,k192:v192|290,291|96,abc96|1 +97|k193:v193,k194:v194|292,293|97,abc97|0 +98|k195:v195,k196:v196|294,295|98,abc98|1 +99|k197:v197,k198:v198|296,297|99,abc99|0 +100|k199:v199,k200:v200|298,299|100,abc100|1 +101|k201:v201,k202:v202|300,301|101,abc101|0 +102|k203:v203,k204:v204|302,303|102,abc102|1 +103|k205:v205,k206:v206|304,305|103,abc103|0 +104|k207:v207,k208:v208|306,307|104,abc104|1 +105|k209:v209,k210:v210|308,309|105,abc105|0 +106|k211:v211,k212:v212|310,311|106,abc106|1 +107|k213:v213,k214:v214|312,313|107,abc107|0 +108|k215:v215,k216:v216|314,315|108,abc108|1 +109|k217:v217,k218:v218|316,317|109,abc109|0 +110|k219:v219,k220:v220|318,319|110,abc110|1 +111|k221:v221,k222:v222|320,321|111,abc111|0 +112|k223:v223,k224:v224|322,323|112,abc112|1 +113|k225:v225,k226:v226|324,325|113,abc113|0 +114|k227:v227,k228:v228|326,327|114,abc114|1 +115|k229:v229,k230:v230|328,329|115,abc115|0 +116|k231:v231,k232:v232|330,331|116,abc116|1 +117|k233:v233,k234:v234|332,333|117,abc117|0 +118|k235:v235,k236:v236|334,335|118,abc118|1 +119|k237:v237,k238:v238|336,337|119,abc119|0 +120|k239:v239,k240:v240|338,339|120,abc120|1 +121|k241:v241,k242:v242|340,341|121,abc121|0 +122|k243:v243,k244:v244|342,343|122,abc122|1 +123|k245:v245,k246:v246|344,345|123,abc123|0 +124|k247:v247,k248:v248|346,347|124,abc124|1 +125|k249:v249,k250:v250|348,349|125,abc125|0 +126|k251:v251,k252:v252|350,351|126,abc126|1 +127|k253:v253,k254:v254|352,353|127,abc127|0 +128|k255:v255,k256:v256|354,355|128,abc128|1 +129|k257:v257,k258:v258|356,357|129,abc129|0 +130|k259:v259,k260:v260|358,359|130,abc130|1 +131|k261:v261,k262:v262|360,361|131,abc131|0 +132|k263:v263,k264:v264|362,363|132,abc132|1 +133|k265:v265,k266:v266|364,365|133,abc133|0 +134|k267:v267,k268:v268|366,367|134,abc134|1 +135|k269:v269,k270:v270|368,369|135,abc135|0 +136|k271:v271,k272:v272|370,371|136,abc136|1 +137|k273:v273,k274:v274|372,373|137,abc137|0 +138|k275:v275,k276:v276|374,375|138,abc138|1 +139|k277:v277,k278:v278|376,377|139,abc139|0 +140|k279:v279,k280:v280|378,379|140,abc140|1 +141|k281:v281,k282:v282|380,381|141,abc141|0 +142|k283:v283,k284:v284|382,383|142,abc142|1 +143|k285:v285,k286:v286|384,385|143,abc143|0 +144|k287:v287,k288:v288|386,387|144,abc144|1 +145|k289:v289,k290:v290|388,389|145,abc145|0 +146|k291:v291,k292:v292|390,391|146,abc146|1 +147|k293:v293,k294:v294|392,393|147,abc147|0 +148|k295:v295,k296:v296|394,395|148,abc148|1 +149|k297:v297,k298:v298|396,397|149,abc149|0 +150|k299:v299,k300:v300|398,399|150,abc150|1 +151|k301:v301,k302:v302|400,401|151,abc151|0 +152|k303:v303,k304:v304|402,403|152,abc152|1 +153|k305:v305,k306:v306|404,405|153,abc153|0 +154|k307:v307,k308:v308|406,407|154,abc154|1 +155|k309:v309,k310:v310|408,409|155,abc155|0 +156|k311:v311,k312:v312|410,411|156,abc156|1 +157|k313:v313,k314:v314|412,413|157,abc157|0 +158|k315:v315,k316:v316|414,415|158,abc158|1 +159|k317:v317,k318:v318|416,417|159,abc159|0 +160|k319:v319,k320:v320|418,419|160,abc160|1 +161|k321:v321,k322:v322|420,421|161,abc161|0 
+162|k323:v323,k324:v324|422,423|162,abc162|1 +163|k325:v325,k326:v326|424,425|163,abc163|0 +164|k327:v327,k328:v328|426,427|164,abc164|1 +165|k329:v329,k330:v330|428,429|165,abc165|0 +166|k331:v331,k332:v332|430,431|166,abc166|1 +167|k333:v333,k334:v334|432,433|167,abc167|0 +168|k335:v335,k336:v336|434,435|168,abc168|1 +169|k337:v337,k338:v338|436,437|169,abc169|0 +170|k339:v339,k340:v340|438,439|170,abc170|1 +171|k341:v341,k342:v342|440,441|171,abc171|0 +172|k343:v343,k344:v344|442,443|172,abc172|1 +173|k345:v345,k346:v346|444,445|173,abc173|0 +174|k347:v347,k348:v348|446,447|174,abc174|1 +175|k349:v349,k350:v350|448,449|175,abc175|0 +176|k351:v351,k352:v352|450,451|176,abc176|1 +177|k353:v353,k354:v354|452,453|177,abc177|0 +178|k355:v355,k356:v356|454,455|178,abc178|1 +179|k357:v357,k358:v358|456,457|179,abc179|0 +180|k359:v359,k360:v360|458,459|180,abc180|1 +181|k361:v361,k362:v362|460,461|181,abc181|0 +182|k363:v363,k364:v364|462,463|182,abc182|1 +183|k365:v365,k366:v366|464,465|183,abc183|0 +184|k367:v367,k368:v368|466,467|184,abc184|1 +185|k369:v369,k370:v370|468,469|185,abc185|0 +186|k371:v371,k372:v372|470,471|186,abc186|1 +187|k373:v373,k374:v374|472,473|187,abc187|0 +188|k375:v375,k376:v376|474,475|188,abc188|1 +189|k377:v377,k378:v378|476,477|189,abc189|0 +190|k379:v379,k380:v380|478,479|190,abc190|1 +191|k381:v381,k382:v382|480,481|191,abc191|0 +192|k383:v383,k384:v384|482,483|192,abc192|1 +193|k385:v385,k386:v386|484,485|193,abc193|0 +194|k387:v387,k388:v388|486,487|194,abc194|1 +195|k389:v389,k390:v390|488,489|195,abc195|0 +196|k391:v391,k392:v392|490,491|196,abc196|1 +197|k393:v393,k394:v394|492,493|197,abc197|0 +198|k395:v395,k396:v396|494,495|198,abc198|1 +199|k397:v397,k398:v398|496,497|199,abc199|0 +200|k399:v399,k400:v400|498,499|200,abc200|1 +201|k401:v401,k402:v402|500,501|201,abc201|0 +202|k403:v403,k404:v404|502,503|202,abc202|1 +203|k405:v405,k406:v406|504,505|203,abc203|0 +204|k407:v407,k408:v408|506,507|204,abc204|1 +205|k409:v409,k410:v410|508,509|205,abc205|0 +206|k411:v411,k412:v412|510,511|206,abc206|1 +207|k413:v413,k414:v414|512,513|207,abc207|0 +208|k415:v415,k416:v416|514,515|208,abc208|1 +209|k417:v417,k418:v418|516,517|209,abc209|0 +210|k419:v419,k420:v420|518,519|210,abc210|1 +211|k421:v421,k422:v422|520,521|211,abc211|0 +212|k423:v423,k424:v424|522,523|212,abc212|1 +213|k425:v425,k426:v426|524,525|213,abc213|0 +214|k427:v427,k428:v428|526,527|214,abc214|1 +215|k429:v429,k430:v430|528,529|215,abc215|0 +216|k431:v431,k432:v432|530,531|216,abc216|1 +217|k433:v433,k434:v434|532,533|217,abc217|0 +218|k435:v435,k436:v436|534,535|218,abc218|1 +219|k437:v437,k438:v438|536,537|219,abc219|0 +220|k439:v439,k440:v440|538,539|220,abc220|1 +221|k441:v441,k442:v442|540,541|221,abc221|0 +222|k443:v443,k444:v444|542,543|222,abc222|1 +223|k445:v445,k446:v446|544,545|223,abc223|0 +224|k447:v447,k448:v448|546,547|224,abc224|1 +225|k449:v449,k450:v450|548,549|225,abc225|0 +226|k451:v451,k452:v452|550,551|226,abc226|1 +227|k453:v453,k454:v454|552,553|227,abc227|0 +228|k455:v455,k456:v456|554,555|228,abc228|1 +229|k457:v457,k458:v458|556,557|229,abc229|0 +230|k459:v459,k460:v460|558,559|230,abc230|1 +231|k461:v461,k462:v462|560,561|231,abc231|0 +232|k463:v463,k464:v464|562,563|232,abc232|1 +233|k465:v465,k466:v466|564,565|233,abc233|0 +234|k467:v467,k468:v468|566,567|234,abc234|1 +235|k469:v469,k470:v470|568,569|235,abc235|0 +236|k471:v471,k472:v472|570,571|236,abc236|1 +237|k473:v473,k474:v474|572,573|237,abc237|0 +238|k475:v475,k476:v476|574,575|238,abc238|1 
+239|k477:v477,k478:v478|576,577|239,abc239|0 +240|k479:v479,k480:v480|578,579|240,abc240|1 +241|k481:v481,k482:v482|580,581|241,abc241|0 +242|k483:v483,k484:v484|582,583|242,abc242|1 +243|k485:v485,k486:v486|584,585|243,abc243|0 +244|k487:v487,k488:v488|586,587|244,abc244|1 +245|k489:v489,k490:v490|588,589|245,abc245|0 +246|k491:v491,k492:v492|590,591|246,abc246|1 +247|k493:v493,k494:v494|592,593|247,abc247|0 +248|k495:v495,k496:v496|594,595|248,abc248|1 +249|k497:v497,k498:v498|596,597|249,abc249|0 +250|k499:v499,k500:v500|598,599|250,abc250|1 +251|k501:v501,k502:v502|600,601|251,abc251|0 +252|k503:v503,k504:v504|602,603|252,abc252|1 +253|k505:v505,k506:v506|604,605|253,abc253|0 +254|k507:v507,k508:v508|606,607|254,abc254|1 +255|k509:v509,k510:v510|608,609|255,abc255|0 +256|k511:v511,k512:v512|610,611|256,abc256|1 +257|k513:v513,k514:v514|612,613|257,abc257|0 +258|k515:v515,k516:v516|614,615|258,abc258|1 +259|k517:v517,k518:v518|616,617|259,abc259|0 +260|k519:v519,k520:v520|618,619|260,abc260|1 +261|k521:v521,k522:v522|620,621|261,abc261|0 +262|k523:v523,k524:v524|622,623|262,abc262|1 +263|k525:v525,k526:v526|624,625|263,abc263|0 +264|k527:v527,k528:v528|626,627|264,abc264|1 +265|k529:v529,k530:v530|628,629|265,abc265|0 +266|k531:v531,k532:v532|630,631|266,abc266|1 +267|k533:v533,k534:v534|632,633|267,abc267|0 +268|k535:v535,k536:v536|634,635|268,abc268|1 +269|k537:v537,k538:v538|636,637|269,abc269|0 +270|k539:v539,k540:v540|638,639|270,abc270|1 +271|k541:v541,k542:v542|640,641|271,abc271|0 +272|k543:v543,k544:v544|642,643|272,abc272|1 +273|k545:v545,k546:v546|644,645|273,abc273|0 +274|k547:v547,k548:v548|646,647|274,abc274|1 +275|k549:v549,k550:v550|648,649|275,abc275|0 +276|k551:v551,k552:v552|650,651|276,abc276|1 +277|k553:v553,k554:v554|652,653|277,abc277|0 +278|k555:v555,k556:v556|654,655|278,abc278|1 +279|k557:v557,k558:v558|656,657|279,abc279|0 +280|k559:v559,k560:v560|658,659|280,abc280|1 +281|k561:v561,k562:v562|660,661|281,abc281|0 +282|k563:v563,k564:v564|662,663|282,abc282|1 +283|k565:v565,k566:v566|664,665|283,abc283|0 +284|k567:v567,k568:v568|666,667|284,abc284|1 +285|k569:v569,k570:v570|668,669|285,abc285|0 +286|k571:v571,k572:v572|670,671|286,abc286|1 +287|k573:v573,k574:v574|672,673|287,abc287|0 +288|k575:v575,k576:v576|674,675|288,abc288|1 +289|k577:v577,k578:v578|676,677|289,abc289|0 +290|k579:v579,k580:v580|678,679|290,abc290|1 +291|k581:v581,k582:v582|680,681|291,abc291|0 +292|k583:v583,k584:v584|682,683|292,abc292|1 +293|k585:v585,k586:v586|684,685|293,abc293|0 +294|k587:v587,k588:v588|686,687|294,abc294|1 +295|k589:v589,k590:v590|688,689|295,abc295|0 +296|k591:v591,k592:v592|690,691|296,abc296|1 +297|k593:v593,k594:v594|692,693|297,abc297|0 +298|k595:v595,k596:v596|694,695|298,abc298|1 +299|k597:v597,k598:v598|696,697|299,abc299|0 +300|k599:v599,k600:v600|698,699|300,abc300|1 +301|k601:v601,k602:v602|700,701|301,abc301|0 +302|k603:v603,k604:v604|702,703|302,abc302|1 +303|k605:v605,k606:v606|704,705|303,abc303|0 +304|k607:v607,k608:v608|706,707|304,abc304|1 +305|k609:v609,k610:v610|708,709|305,abc305|0 +306|k611:v611,k612:v612|710,711|306,abc306|1 +307|k613:v613,k614:v614|712,713|307,abc307|0 +308|k615:v615,k616:v616|714,715|308,abc308|1 +309|k617:v617,k618:v618|716,717|309,abc309|0 +310|k619:v619,k620:v620|718,719|310,abc310|1 +311|k621:v621,k622:v622|720,721|311,abc311|0 +312|k623:v623,k624:v624|722,723|312,abc312|1 +313|k625:v625,k626:v626|724,725|313,abc313|0 +314|k627:v627,k628:v628|726,727|314,abc314|1 +315|k629:v629,k630:v630|728,729|315,abc315|0 
+316|k631:v631,k632:v632|730,731|316,abc316|1 +317|k633:v633,k634:v634|732,733|317,abc317|0 +318|k635:v635,k636:v636|734,735|318,abc318|1 +319|k637:v637,k638:v638|736,737|319,abc319|0 +320|k639:v639,k640:v640|738,739|320,abc320|1 +321|k641:v641,k642:v642|740,741|321,abc321|0 +322|k643:v643,k644:v644|742,743|322,abc322|1 +323|k645:v645,k646:v646|744,745|323,abc323|0 +324|k647:v647,k648:v648|746,747|324,abc324|1 +325|k649:v649,k650:v650|748,749|325,abc325|0 +326|k651:v651,k652:v652|750,751|326,abc326|1 +327|k653:v653,k654:v654|752,753|327,abc327|0 +328|k655:v655,k656:v656|754,755|328,abc328|1 +329|k657:v657,k658:v658|756,757|329,abc329|0 +330|k659:v659,k660:v660|758,759|330,abc330|1 +331|k661:v661,k662:v662|760,761|331,abc331|0 +332|k663:v663,k664:v664|762,763|332,abc332|1 +333|k665:v665,k666:v666|764,765|333,abc333|0 +334|k667:v667,k668:v668|766,767|334,abc334|1 +335|k669:v669,k670:v670|768,769|335,abc335|0 +336|k671:v671,k672:v672|770,771|336,abc336|1 +337|k673:v673,k674:v674|772,773|337,abc337|0 +338|k675:v675,k676:v676|774,775|338,abc338|1 +339|k677:v677,k678:v678|776,777|339,abc339|0 +340|k679:v679,k680:v680|778,779|340,abc340|1 +341|k681:v681,k682:v682|780,781|341,abc341|0 +342|k683:v683,k684:v684|782,783|342,abc342|1 +343|k685:v685,k686:v686|784,785|343,abc343|0 +344|k687:v687,k688:v688|786,787|344,abc344|1 +345|k689:v689,k690:v690|788,789|345,abc345|0 +346|k691:v691,k692:v692|790,791|346,abc346|1 +347|k693:v693,k694:v694|792,793|347,abc347|0 +348|k695:v695,k696:v696|794,795|348,abc348|1 +349|k697:v697,k698:v698|796,797|349,abc349|0 +350|k699:v699,k700:v700|798,799|350,abc350|1 +351|k701:v701,k702:v702|800,801|351,abc351|0 +352|k703:v703,k704:v704|802,803|352,abc352|1 +353|k705:v705,k706:v706|804,805|353,abc353|0 +354|k707:v707,k708:v708|806,807|354,abc354|1 +355|k709:v709,k710:v710|808,809|355,abc355|0 +356|k711:v711,k712:v712|810,811|356,abc356|1 +357|k713:v713,k714:v714|812,813|357,abc357|0 +358|k715:v715,k716:v716|814,815|358,abc358|1 +359|k717:v717,k718:v718|816,817|359,abc359|0 +360|k719:v719,k720:v720|818,819|360,abc360|1 +361|k721:v721,k722:v722|820,821|361,abc361|0 +362|k723:v723,k724:v724|822,823|362,abc362|1 +363|k725:v725,k726:v726|824,825|363,abc363|0 +364|k727:v727,k728:v728|826,827|364,abc364|1 +365|k729:v729,k730:v730|828,829|365,abc365|0 +366|k731:v731,k732:v732|830,831|366,abc366|1 +367|k733:v733,k734:v734|832,833|367,abc367|0 +368|k735:v735,k736:v736|834,835|368,abc368|1 +369|k737:v737,k738:v738|836,837|369,abc369|0 +370|k739:v739,k740:v740|838,839|370,abc370|1 +371|k741:v741,k742:v742|840,841|371,abc371|0 +372|k743:v743,k744:v744|842,843|372,abc372|1 +373|k745:v745,k746:v746|844,845|373,abc373|0 +374|k747:v747,k748:v748|846,847|374,abc374|1 +375|k749:v749,k750:v750|848,849|375,abc375|0 +376|k751:v751,k752:v752|850,851|376,abc376|1 +377|k753:v753,k754:v754|852,853|377,abc377|0 +378|k755:v755,k756:v756|854,855|378,abc378|1 +379|k757:v757,k758:v758|856,857|379,abc379|0 +380|k759:v759,k760:v760|858,859|380,abc380|1 +381|k761:v761,k762:v762|860,861|381,abc381|0 +382|k763:v763,k764:v764|862,863|382,abc382|1 +383|k765:v765,k766:v766|864,865|383,abc383|0 +384|k767:v767,k768:v768|866,867|384,abc384|1 +385|k769:v769,k770:v770|868,869|385,abc385|0 +386|k771:v771,k772:v772|870,871|386,abc386|1 +387|k773:v773,k774:v774|872,873|387,abc387|0 +388|k775:v775,k776:v776|874,875|388,abc388|1 +389|k777:v777,k778:v778|876,877|389,abc389|0 +390|k779:v779,k780:v780|878,879|390,abc390|1 +391|k781:v781,k782:v782|880,881|391,abc391|0 +392|k783:v783,k784:v784|882,883|392,abc392|1 
+393|k785:v785,k786:v786|884,885|393,abc393|0 +394|k787:v787,k788:v788|886,887|394,abc394|1 +395|k789:v789,k790:v790|888,889|395,abc395|0 +396|k791:v791,k792:v792|890,891|396,abc396|1 +397|k793:v793,k794:v794|892,893|397,abc397|0 +398|k795:v795,k796:v796|894,895|398,abc398|1 +399|k797:v797,k798:v798|896,897|399,abc399|0 +400|k799:v799,k800:v800|898,899|400,abc400|1 +401|k801:v801,k802:v802|900,901|401,abc401|0 +402|k803:v803,k804:v804|902,903|402,abc402|1 +403|k805:v805,k806:v806|904,905|403,abc403|0 +404|k807:v807,k808:v808|906,907|404,abc404|1 +405|k809:v809,k810:v810|908,909|405,abc405|0 +406|k811:v811,k812:v812|910,911|406,abc406|1 +407|k813:v813,k814:v814|912,913|407,abc407|0 +408|k815:v815,k816:v816|914,915|408,abc408|1 +409|k817:v817,k818:v818|916,917|409,abc409|0 +410|k819:v819,k820:v820|918,919|410,abc410|1 +411|k821:v821,k822:v822|920,921|411,abc411|0 +412|k823:v823,k824:v824|922,923|412,abc412|1 +413|k825:v825,k826:v826|924,925|413,abc413|0 +414|k827:v827,k828:v828|926,927|414,abc414|1 +415|k829:v829,k830:v830|928,929|415,abc415|0 +416|k831:v831,k832:v832|930,931|416,abc416|1 +417|k833:v833,k834:v834|932,933|417,abc417|0 +418|k835:v835,k836:v836|934,935|418,abc418|1 +419|k837:v837,k838:v838|936,937|419,abc419|0 +420|k839:v839,k840:v840|938,939|420,abc420|1 +421|k841:v841,k842:v842|940,941|421,abc421|0 +422|k843:v843,k844:v844|942,943|422,abc422|1 +423|k845:v845,k846:v846|944,945|423,abc423|0 +424|k847:v847,k848:v848|946,947|424,abc424|1 +425|k849:v849,k850:v850|948,949|425,abc425|0 +426|k851:v851,k852:v852|950,951|426,abc426|1 +427|k853:v853,k854:v854|952,953|427,abc427|0 +428|k855:v855,k856:v856|954,955|428,abc428|1 +429|k857:v857,k858:v858|956,957|429,abc429|0 +430|k859:v859,k860:v860|958,959|430,abc430|1 +431|k861:v861,k862:v862|960,961|431,abc431|0 +432|k863:v863,k864:v864|962,963|432,abc432|1 +433|k865:v865,k866:v866|964,965|433,abc433|0 +434|k867:v867,k868:v868|966,967|434,abc434|1 +435|k869:v869,k870:v870|968,969|435,abc435|0 +436|k871:v871,k872:v872|970,971|436,abc436|1 +437|k873:v873,k874:v874|972,973|437,abc437|0 +438|k875:v875,k876:v876|974,975|438,abc438|1 +439|k877:v877,k878:v878|976,977|439,abc439|0 +440|k879:v879,k880:v880|978,979|440,abc440|1 +441|k881:v881,k882:v882|980,981|441,abc441|0 +442|k883:v883,k884:v884|982,983|442,abc442|1 +443|k885:v885,k886:v886|984,985|443,abc443|0 +444|k887:v887,k888:v888|986,987|444,abc444|1 +445|k889:v889,k890:v890|988,989|445,abc445|0 +446|k891:v891,k892:v892|990,991|446,abc446|1 +447|k893:v893,k894:v894|992,993|447,abc447|0 +448|k895:v895,k896:v896|994,995|448,abc448|1 +449|k897:v897,k898:v898|996,997|449,abc449|0 +450|k899:v899,k900:v900|998,999|450,abc450|1 +451|k901:v901,k902:v902|1000,1001|451,abc451|0 +452|k903:v903,k904:v904|1002,1003|452,abc452|1 +453|k905:v905,k906:v906|1004,1005|453,abc453|0 +454|k907:v907,k908:v908|1006,1007|454,abc454|1 +455|k909:v909,k910:v910|1008,1009|455,abc455|0 +456|k911:v911,k912:v912|1010,1011|456,abc456|1 +457|k913:v913,k914:v914|1012,1013|457,abc457|0 +458|k915:v915,k916:v916|1014,1015|458,abc458|1 +459|k917:v917,k918:v918|1016,1017|459,abc459|0 +460|k919:v919,k920:v920|1018,1019|460,abc460|1 +461|k921:v921,k922:v922|1020,1021|461,abc461|0 +462|k923:v923,k924:v924|1022,1023|462,abc462|1 +463|k925:v925,k926:v926|1024,1025|463,abc463|0 +464|k927:v927,k928:v928|1026,1027|464,abc464|1 +465|k929:v929,k930:v930|1028,1029|465,abc465|0 +466|k931:v931,k932:v932|1030,1031|466,abc466|1 +467|k933:v933,k934:v934|1032,1033|467,abc467|0 +468|k935:v935,k936:v936|1034,1035|468,abc468|1 
+469|k937:v937,k938:v938|1036,1037|469,abc469|0 +470|k939:v939,k940:v940|1038,1039|470,abc470|1 +471|k941:v941,k942:v942|1040,1041|471,abc471|0 +472|k943:v943,k944:v944|1042,1043|472,abc472|1 +473|k945:v945,k946:v946|1044,1045|473,abc473|0 +474|k947:v947,k948:v948|1046,1047|474,abc474|1 +475|k949:v949,k950:v950|1048,1049|475,abc475|0 +476|k951:v951,k952:v952|1050,1051|476,abc476|1 +477|k953:v953,k954:v954|1052,1053|477,abc477|0 +478|k955:v955,k956:v956|1054,1055|478,abc478|1 +479|k957:v957,k958:v958|1056,1057|479,abc479|0 +480|k959:v959,k960:v960|1058,1059|480,abc480|1 +481|k961:v961,k962:v962|1060,1061|481,abc481|0 +482|k963:v963,k964:v964|1062,1063|482,abc482|1 +483|k965:v965,k966:v966|1064,1065|483,abc483|0 +484|k967:v967,k968:v968|1066,1067|484,abc484|1 +485|k969:v969,k970:v970|1068,1069|485,abc485|0 +486|k971:v971,k972:v972|1070,1071|486,abc486|1 +487|k973:v973,k974:v974|1072,1073|487,abc487|0 +488|k975:v975,k976:v976|1074,1075|488,abc488|1 +489|k977:v977,k978:v978|1076,1077|489,abc489|0 +490|k979:v979,k980:v980|1078,1079|490,abc490|1 +491|k981:v981,k982:v982|1080,1081|491,abc491|0 +492|k983:v983,k984:v984|1082,1083|492,abc492|1 +493|k985:v985,k986:v986|1084,1085|493,abc493|0 +494|k987:v987,k988:v988|1086,1087|494,abc494|1 +495|k989:v989,k990:v990|1088,1089|495,abc495|0 +496|k991:v991,k992:v992|1090,1091|496,abc496|1 +497|k993:v993,k994:v994|1092,1093|497,abc497|0 +498|k995:v995,k996:v996|1094,1095|498,abc498|1 +499|k997:v997,k998:v998|1096,1097|499,abc499|0 +500|k999:v999,k1000:v1000|1098,1099|500,abc500|1 +501|k1001:v1001,k1002:v1002|1100,1101|501,abc501|0 +502|k1003:v1003,k1004:v1004|1102,1103|502,abc502|1 +503|k1005:v1005,k1006:v1006|1104,1105|503,abc503|0 +504|k1007:v1007,k1008:v1008|1106,1107|504,abc504|1 +505|k1009:v1009,k1010:v1010|1108,1109|505,abc505|0 +506|k1011:v1011,k1012:v1012|1110,1111|506,abc506|1 +507|k1013:v1013,k1014:v1014|1112,1113|507,abc507|0 +508|k1015:v1015,k1016:v1016|1114,1115|508,abc508|1 +509|k1017:v1017,k1018:v1018|1116,1117|509,abc509|0 +510|k1019:v1019,k1020:v1020|1118,1119|510,abc510|1 +511|k1021:v1021,k1022:v1022|1120,1121|511,abc511|0 +512|k1023:v1023,k1024:v1024|1122,1123|512,abc512|1 +513|k1025:v1025,k1026:v1026|1124,1125|513,abc513|0 +514|k1027:v1027,k1028:v1028|1126,1127|514,abc514|1 +515|k1029:v1029,k1030:v1030|1128,1129|515,abc515|0 +516|k1031:v1031,k1032:v1032|1130,1131|516,abc516|1 +517|k1033:v1033,k1034:v1034|1132,1133|517,abc517|0 +518|k1035:v1035,k1036:v1036|1134,1135|518,abc518|1 +519|k1037:v1037,k1038:v1038|1136,1137|519,abc519|0 +520|k1039:v1039,k1040:v1040|1138,1139|520,abc520|1 +521|k1041:v1041,k1042:v1042|1140,1141|521,abc521|0 +522|k1043:v1043,k1044:v1044|1142,1143|522,abc522|1 +523|k1045:v1045,k1046:v1046|1144,1145|523,abc523|0 +524|k1047:v1047,k1048:v1048|1146,1147|524,abc524|1 +525|k1049:v1049,k1050:v1050|1148,1149|525,abc525|0 +526|k1051:v1051,k1052:v1052|1150,1151|526,abc526|1 +527|k1053:v1053,k1054:v1054|1152,1153|527,abc527|0 +528|k1055:v1055,k1056:v1056|1154,1155|528,abc528|1 +529|k1057:v1057,k1058:v1058|1156,1157|529,abc529|0 +530|k1059:v1059,k1060:v1060|1158,1159|530,abc530|1 +531|k1061:v1061,k1062:v1062|1160,1161|531,abc531|0 +532|k1063:v1063,k1064:v1064|1162,1163|532,abc532|1 +533|k1065:v1065,k1066:v1066|1164,1165|533,abc533|0 +534|k1067:v1067,k1068:v1068|1166,1167|534,abc534|1 +535|k1069:v1069,k1070:v1070|1168,1169|535,abc535|0 +536|k1071:v1071,k1072:v1072|1170,1171|536,abc536|1 +537|k1073:v1073,k1074:v1074|1172,1173|537,abc537|0 +538|k1075:v1075,k1076:v1076|1174,1175|538,abc538|1 
+539|k1077:v1077,k1078:v1078|1176,1177|539,abc539|0 +540|k1079:v1079,k1080:v1080|1178,1179|540,abc540|1 +541|k1081:v1081,k1082:v1082|1180,1181|541,abc541|0 +542|k1083:v1083,k1084:v1084|1182,1183|542,abc542|1 +543|k1085:v1085,k1086:v1086|1184,1185|543,abc543|0 +544|k1087:v1087,k1088:v1088|1186,1187|544,abc544|1 +545|k1089:v1089,k1090:v1090|1188,1189|545,abc545|0 +546|k1091:v1091,k1092:v1092|1190,1191|546,abc546|1 +547|k1093:v1093,k1094:v1094|1192,1193|547,abc547|0 +548|k1095:v1095,k1096:v1096|1194,1195|548,abc548|1 +549|k1097:v1097,k1098:v1098|1196,1197|549,abc549|0 +550|k1099:v1099,k1100:v1100|1198,1199|550,abc550|1 +551|k1101:v1101,k1102:v1102|1200,1201|551,abc551|0 +552|k1103:v1103,k1104:v1104|1202,1203|552,abc552|1 +553|k1105:v1105,k1106:v1106|1204,1205|553,abc553|0 +554|k1107:v1107,k1108:v1108|1206,1207|554,abc554|1 +555|k1109:v1109,k1110:v1110|1208,1209|555,abc555|0 +556|k1111:v1111,k1112:v1112|1210,1211|556,abc556|1 +557|k1113:v1113,k1114:v1114|1212,1213|557,abc557|0 +558|k1115:v1115,k1116:v1116|1214,1215|558,abc558|1 +559|k1117:v1117,k1118:v1118|1216,1217|559,abc559|0 +560|k1119:v1119,k1120:v1120|1218,1219|560,abc560|1 +561|k1121:v1121,k1122:v1122|1220,1221|561,abc561|0 +562|k1123:v1123,k1124:v1124|1222,1223|562,abc562|1 +563|k1125:v1125,k1126:v1126|1224,1225|563,abc563|0 +564|k1127:v1127,k1128:v1128|1226,1227|564,abc564|1 +565|k1129:v1129,k1130:v1130|1228,1229|565,abc565|0 +566|k1131:v1131,k1132:v1132|1230,1231|566,abc566|1 +567|k1133:v1133,k1134:v1134|1232,1233|567,abc567|0 +568|k1135:v1135,k1136:v1136|1234,1235|568,abc568|1 +569|k1137:v1137,k1138:v1138|1236,1237|569,abc569|0 +570|k1139:v1139,k1140:v1140|1238,1239|570,abc570|1 +571|k1141:v1141,k1142:v1142|1240,1241|571,abc571|0 +572|k1143:v1143,k1144:v1144|1242,1243|572,abc572|1 +573|k1145:v1145,k1146:v1146|1244,1245|573,abc573|0 +574|k1147:v1147,k1148:v1148|1246,1247|574,abc574|1 +575|k1149:v1149,k1150:v1150|1248,1249|575,abc575|0 +576|k1151:v1151,k1152:v1152|1250,1251|576,abc576|1 +577|k1153:v1153,k1154:v1154|1252,1253|577,abc577|0 +578|k1155:v1155,k1156:v1156|1254,1255|578,abc578|1 +579|k1157:v1157,k1158:v1158|1256,1257|579,abc579|0 +580|k1159:v1159,k1160:v1160|1258,1259|580,abc580|1 +581|k1161:v1161,k1162:v1162|1260,1261|581,abc581|0 +582|k1163:v1163,k1164:v1164|1262,1263|582,abc582|1 +583|k1165:v1165,k1166:v1166|1264,1265|583,abc583|0 +584|k1167:v1167,k1168:v1168|1266,1267|584,abc584|1 +585|k1169:v1169,k1170:v1170|1268,1269|585,abc585|0 +586|k1171:v1171,k1172:v1172|1270,1271|586,abc586|1 +587|k1173:v1173,k1174:v1174|1272,1273|587,abc587|0 +588|k1175:v1175,k1176:v1176|1274,1275|588,abc588|1 +589|k1177:v1177,k1178:v1178|1276,1277|589,abc589|0 +590|k1179:v1179,k1180:v1180|1278,1279|590,abc590|1 +591|k1181:v1181,k1182:v1182|1280,1281|591,abc591|0 +592|k1183:v1183,k1184:v1184|1282,1283|592,abc592|1 +593|k1185:v1185,k1186:v1186|1284,1285|593,abc593|0 +594|k1187:v1187,k1188:v1188|1286,1287|594,abc594|1 +595|k1189:v1189,k1190:v1190|1288,1289|595,abc595|0 +596|k1191:v1191,k1192:v1192|1290,1291|596,abc596|1 +597|k1193:v1193,k1194:v1194|1292,1293|597,abc597|0 +598|k1195:v1195,k1196:v1196|1294,1295|598,abc598|1 +599|k1197:v1197,k1198:v1198|1296,1297|599,abc599|0 +600|k1199:v1199,k1200:v1200|1298,1299|600,abc600|1 +601|k1201:v1201,k1202:v1202|1300,1301|601,abc601|0 +602|k1203:v1203,k1204:v1204|1302,1303|602,abc602|1 +603|k1205:v1205,k1206:v1206|1304,1305|603,abc603|0 +604|k1207:v1207,k1208:v1208|1306,1307|604,abc604|1 +605|k1209:v1209,k1210:v1210|1308,1309|605,abc605|0 +606|k1211:v1211,k1212:v1212|1310,1311|606,abc606|1 
+607|k1213:v1213,k1214:v1214|1312,1313|607,abc607|0 +608|k1215:v1215,k1216:v1216|1314,1315|608,abc608|1 +609|k1217:v1217,k1218:v1218|1316,1317|609,abc609|0 +610|k1219:v1219,k1220:v1220|1318,1319|610,abc610|1 +611|k1221:v1221,k1222:v1222|1320,1321|611,abc611|0 +612|k1223:v1223,k1224:v1224|1322,1323|612,abc612|1 +613|k1225:v1225,k1226:v1226|1324,1325|613,abc613|0 +614|k1227:v1227,k1228:v1228|1326,1327|614,abc614|1 +615|k1229:v1229,k1230:v1230|1328,1329|615,abc615|0 +616|k1231:v1231,k1232:v1232|1330,1331|616,abc616|1 +617|k1233:v1233,k1234:v1234|1332,1333|617,abc617|0 +618|k1235:v1235,k1236:v1236|1334,1335|618,abc618|1 +619|k1237:v1237,k1238:v1238|1336,1337|619,abc619|0 +620|k1239:v1239,k1240:v1240|1338,1339|620,abc620|1 +621|k1241:v1241,k1242:v1242|1340,1341|621,abc621|0 +622|k1243:v1243,k1244:v1244|1342,1343|622,abc622|1 +623|k1245:v1245,k1246:v1246|1344,1345|623,abc623|0 +624|k1247:v1247,k1248:v1248|1346,1347|624,abc624|1 +625|k1249:v1249,k1250:v1250|1348,1349|625,abc625|0 +626|k1251:v1251,k1252:v1252|1350,1351|626,abc626|1 +627|k1253:v1253,k1254:v1254|1352,1353|627,abc627|0 +628|k1255:v1255,k1256:v1256|1354,1355|628,abc628|1 +629|k1257:v1257,k1258:v1258|1356,1357|629,abc629|0 +630|k1259:v1259,k1260:v1260|1358,1359|630,abc630|1 +631|k1261:v1261,k1262:v1262|1360,1361|631,abc631|0 +632|k1263:v1263,k1264:v1264|1362,1363|632,abc632|1 +633|k1265:v1265,k1266:v1266|1364,1365|633,abc633|0 +634|k1267:v1267,k1268:v1268|1366,1367|634,abc634|1 +635|k1269:v1269,k1270:v1270|1368,1369|635,abc635|0 +636|k1271:v1271,k1272:v1272|1370,1371|636,abc636|1 +637|k1273:v1273,k1274:v1274|1372,1373|637,abc637|0 +638|k1275:v1275,k1276:v1276|1374,1375|638,abc638|1 +639|k1277:v1277,k1278:v1278|1376,1377|639,abc639|0 +640|k1279:v1279,k1280:v1280|1378,1379|640,abc640|1 +641|k1281:v1281,k1282:v1282|1380,1381|641,abc641|0 +642|k1283:v1283,k1284:v1284|1382,1383|642,abc642|1 +643|k1285:v1285,k1286:v1286|1384,1385|643,abc643|0 +644|k1287:v1287,k1288:v1288|1386,1387|644,abc644|1 +645|k1289:v1289,k1290:v1290|1388,1389|645,abc645|0 +646|k1291:v1291,k1292:v1292|1390,1391|646,abc646|1 +647|k1293:v1293,k1294:v1294|1392,1393|647,abc647|0 +648|k1295:v1295,k1296:v1296|1394,1395|648,abc648|1 +649|k1297:v1297,k1298:v1298|1396,1397|649,abc649|0 +650|k1299:v1299,k1300:v1300|1398,1399|650,abc650|1 +651|k1301:v1301,k1302:v1302|1400,1401|651,abc651|0 +652|k1303:v1303,k1304:v1304|1402,1403|652,abc652|1 +653|k1305:v1305,k1306:v1306|1404,1405|653,abc653|0 +654|k1307:v1307,k1308:v1308|1406,1407|654,abc654|1 +655|k1309:v1309,k1310:v1310|1408,1409|655,abc655|0 +656|k1311:v1311,k1312:v1312|1410,1411|656,abc656|1 +657|k1313:v1313,k1314:v1314|1412,1413|657,abc657|0 +658|k1315:v1315,k1316:v1316|1414,1415|658,abc658|1 +659|k1317:v1317,k1318:v1318|1416,1417|659,abc659|0 +660|k1319:v1319,k1320:v1320|1418,1419|660,abc660|1 +661|k1321:v1321,k1322:v1322|1420,1421|661,abc661|0 +662|k1323:v1323,k1324:v1324|1422,1423|662,abc662|1 +663|k1325:v1325,k1326:v1326|1424,1425|663,abc663|0 +664|k1327:v1327,k1328:v1328|1426,1427|664,abc664|1 +665|k1329:v1329,k1330:v1330|1428,1429|665,abc665|0 +666|k1331:v1331,k1332:v1332|1430,1431|666,abc666|1 +667|k1333:v1333,k1334:v1334|1432,1433|667,abc667|0 +668|k1335:v1335,k1336:v1336|1434,1435|668,abc668|1 +669|k1337:v1337,k1338:v1338|1436,1437|669,abc669|0 +670|k1339:v1339,k1340:v1340|1438,1439|670,abc670|1 +671|k1341:v1341,k1342:v1342|1440,1441|671,abc671|0 +672|k1343:v1343,k1344:v1344|1442,1443|672,abc672|1 +673|k1345:v1345,k1346:v1346|1444,1445|673,abc673|0 +674|k1347:v1347,k1348:v1348|1446,1447|674,abc674|1 
+675|k1349:v1349,k1350:v1350|1448,1449|675,abc675|0 +676|k1351:v1351,k1352:v1352|1450,1451|676,abc676|1 +677|k1353:v1353,k1354:v1354|1452,1453|677,abc677|0 +678|k1355:v1355,k1356:v1356|1454,1455|678,abc678|1 +679|k1357:v1357,k1358:v1358|1456,1457|679,abc679|0 +680|k1359:v1359,k1360:v1360|1458,1459|680,abc680|1 +681|k1361:v1361,k1362:v1362|1460,1461|681,abc681|0 +682|k1363:v1363,k1364:v1364|1462,1463|682,abc682|1 +683|k1365:v1365,k1366:v1366|1464,1465|683,abc683|0 +684|k1367:v1367,k1368:v1368|1466,1467|684,abc684|1 +685|k1369:v1369,k1370:v1370|1468,1469|685,abc685|0 +686|k1371:v1371,k1372:v1372|1470,1471|686,abc686|1 +687|k1373:v1373,k1374:v1374|1472,1473|687,abc687|0 +688|k1375:v1375,k1376:v1376|1474,1475|688,abc688|1 +689|k1377:v1377,k1378:v1378|1476,1477|689,abc689|0 +690|k1379:v1379,k1380:v1380|1478,1479|690,abc690|1 +691|k1381:v1381,k1382:v1382|1480,1481|691,abc691|0 +692|k1383:v1383,k1384:v1384|1482,1483|692,abc692|1 +693|k1385:v1385,k1386:v1386|1484,1485|693,abc693|0 +694|k1387:v1387,k1388:v1388|1486,1487|694,abc694|1 +695|k1389:v1389,k1390:v1390|1488,1489|695,abc695|0 +696|k1391:v1391,k1392:v1392|1490,1491|696,abc696|1 +697|k1393:v1393,k1394:v1394|1492,1493|697,abc697|0 +698|k1395:v1395,k1396:v1396|1494,1495|698,abc698|1 +699|k1397:v1397,k1398:v1398|1496,1497|699,abc699|0 +700|k1399:v1399,k1400:v1400|1498,1499|700,abc700|1 +701|k1401:v1401,k1402:v1402|1500,1501|701,abc701|0 +702|k1403:v1403,k1404:v1404|1502,1503|702,abc702|1 +703|k1405:v1405,k1406:v1406|1504,1505|703,abc703|0 +704|k1407:v1407,k1408:v1408|1506,1507|704,abc704|1 +705|k1409:v1409,k1410:v1410|1508,1509|705,abc705|0 +706|k1411:v1411,k1412:v1412|1510,1511|706,abc706|1 +707|k1413:v1413,k1414:v1414|1512,1513|707,abc707|0 +708|k1415:v1415,k1416:v1416|1514,1515|708,abc708|1 +709|k1417:v1417,k1418:v1418|1516,1517|709,abc709|0 +710|k1419:v1419,k1420:v1420|1518,1519|710,abc710|1 +711|k1421:v1421,k1422:v1422|1520,1521|711,abc711|0 +712|k1423:v1423,k1424:v1424|1522,1523|712,abc712|1 +713|k1425:v1425,k1426:v1426|1524,1525|713,abc713|0 +714|k1427:v1427,k1428:v1428|1526,1527|714,abc714|1 +715|k1429:v1429,k1430:v1430|1528,1529|715,abc715|0 +716|k1431:v1431,k1432:v1432|1530,1531|716,abc716|1 +717|k1433:v1433,k1434:v1434|1532,1533|717,abc717|0 +718|k1435:v1435,k1436:v1436|1534,1535|718,abc718|1 +719|k1437:v1437,k1438:v1438|1536,1537|719,abc719|0 +720|k1439:v1439,k1440:v1440|1538,1539|720,abc720|1 +721|k1441:v1441,k1442:v1442|1540,1541|721,abc721|0 +722|k1443:v1443,k1444:v1444|1542,1543|722,abc722|1 +723|k1445:v1445,k1446:v1446|1544,1545|723,abc723|0 +724|k1447:v1447,k1448:v1448|1546,1547|724,abc724|1 +725|k1449:v1449,k1450:v1450|1548,1549|725,abc725|0 +726|k1451:v1451,k1452:v1452|1550,1551|726,abc726|1 +727|k1453:v1453,k1454:v1454|1552,1553|727,abc727|0 +728|k1455:v1455,k1456:v1456|1554,1555|728,abc728|1 +729|k1457:v1457,k1458:v1458|1556,1557|729,abc729|0 +730|k1459:v1459,k1460:v1460|1558,1559|730,abc730|1 +731|k1461:v1461,k1462:v1462|1560,1561|731,abc731|0 +732|k1463:v1463,k1464:v1464|1562,1563|732,abc732|1 +733|k1465:v1465,k1466:v1466|1564,1565|733,abc733|0 +734|k1467:v1467,k1468:v1468|1566,1567|734,abc734|1 +735|k1469:v1469,k1470:v1470|1568,1569|735,abc735|0 +736|k1471:v1471,k1472:v1472|1570,1571|736,abc736|1 +737|k1473:v1473,k1474:v1474|1572,1573|737,abc737|0 +738|k1475:v1475,k1476:v1476|1574,1575|738,abc738|1 +739|k1477:v1477,k1478:v1478|1576,1577|739,abc739|0 +740|k1479:v1479,k1480:v1480|1578,1579|740,abc740|1 +741|k1481:v1481,k1482:v1482|1580,1581|741,abc741|0 +742|k1483:v1483,k1484:v1484|1582,1583|742,abc742|1 
+743|k1485:v1485,k1486:v1486|1584,1585|743,abc743|0 +744|k1487:v1487,k1488:v1488|1586,1587|744,abc744|1 +745|k1489:v1489,k1490:v1490|1588,1589|745,abc745|0 +746|k1491:v1491,k1492:v1492|1590,1591|746,abc746|1 +747|k1493:v1493,k1494:v1494|1592,1593|747,abc747|0 +748|k1495:v1495,k1496:v1496|1594,1595|748,abc748|1 +749|k1497:v1497,k1498:v1498|1596,1597|749,abc749|0 +750|k1499:v1499,k1500:v1500|1598,1599|750,abc750|1 +751|k1501:v1501,k1502:v1502|1600,1601|751,abc751|0 +752|k1503:v1503,k1504:v1504|1602,1603|752,abc752|1 +753|k1505:v1505,k1506:v1506|1604,1605|753,abc753|0 +754|k1507:v1507,k1508:v1508|1606,1607|754,abc754|1 +755|k1509:v1509,k1510:v1510|1608,1609|755,abc755|0 +756|k1511:v1511,k1512:v1512|1610,1611|756,abc756|1 +757|k1513:v1513,k1514:v1514|1612,1613|757,abc757|0 +758|k1515:v1515,k1516:v1516|1614,1615|758,abc758|1 +759|k1517:v1517,k1518:v1518|1616,1617|759,abc759|0 +760|k1519:v1519,k1520:v1520|1618,1619|760,abc760|1 +761|k1521:v1521,k1522:v1522|1620,1621|761,abc761|0 +762|k1523:v1523,k1524:v1524|1622,1623|762,abc762|1 +763|k1525:v1525,k1526:v1526|1624,1625|763,abc763|0 +764|k1527:v1527,k1528:v1528|1626,1627|764,abc764|1 +765|k1529:v1529,k1530:v1530|1628,1629|765,abc765|0 +766|k1531:v1531,k1532:v1532|1630,1631|766,abc766|1 +767|k1533:v1533,k1534:v1534|1632,1633|767,abc767|0 +768|k1535:v1535,k1536:v1536|1634,1635|768,abc768|1 +769|k1537:v1537,k1538:v1538|1636,1637|769,abc769|0 +770|k1539:v1539,k1540:v1540|1638,1639|770,abc770|1 +771|k1541:v1541,k1542:v1542|1640,1641|771,abc771|0 +772|k1543:v1543,k1544:v1544|1642,1643|772,abc772|1 +773|k1545:v1545,k1546:v1546|1644,1645|773,abc773|0 +774|k1547:v1547,k1548:v1548|1646,1647|774,abc774|1 +775|k1549:v1549,k1550:v1550|1648,1649|775,abc775|0 +776|k1551:v1551,k1552:v1552|1650,1651|776,abc776|1 +777|k1553:v1553,k1554:v1554|1652,1653|777,abc777|0 +778|k1555:v1555,k1556:v1556|1654,1655|778,abc778|1 +779|k1557:v1557,k1558:v1558|1656,1657|779,abc779|0 +780|k1559:v1559,k1560:v1560|1658,1659|780,abc780|1 +781|k1561:v1561,k1562:v1562|1660,1661|781,abc781|0 +782|k1563:v1563,k1564:v1564|1662,1663|782,abc782|1 +783|k1565:v1565,k1566:v1566|1664,1665|783,abc783|0 +784|k1567:v1567,k1568:v1568|1666,1667|784,abc784|1 +785|k1569:v1569,k1570:v1570|1668,1669|785,abc785|0 +786|k1571:v1571,k1572:v1572|1670,1671|786,abc786|1 +787|k1573:v1573,k1574:v1574|1672,1673|787,abc787|0 +788|k1575:v1575,k1576:v1576|1674,1675|788,abc788|1 +789|k1577:v1577,k1578:v1578|1676,1677|789,abc789|0 +790|k1579:v1579,k1580:v1580|1678,1679|790,abc790|1 +791|k1581:v1581,k1582:v1582|1680,1681|791,abc791|0 +792|k1583:v1583,k1584:v1584|1682,1683|792,abc792|1 +793|k1585:v1585,k1586:v1586|1684,1685|793,abc793|0 +794|k1587:v1587,k1588:v1588|1686,1687|794,abc794|1 +795|k1589:v1589,k1590:v1590|1688,1689|795,abc795|0 +796|k1591:v1591,k1592:v1592|1690,1691|796,abc796|1 +797|k1593:v1593,k1594:v1594|1692,1693|797,abc797|0 +798|k1595:v1595,k1596:v1596|1694,1695|798,abc798|1 +799|k1597:v1597,k1598:v1598|1696,1697|799,abc799|0 +800|k1599:v1599,k1600:v1600|1698,1699|800,abc800|1 +801|k1601:v1601,k1602:v1602|1700,1701|801,abc801|0 +802|k1603:v1603,k1604:v1604|1702,1703|802,abc802|1 +803|k1605:v1605,k1606:v1606|1704,1705|803,abc803|0 +804|k1607:v1607,k1608:v1608|1706,1707|804,abc804|1 +805|k1609:v1609,k1610:v1610|1708,1709|805,abc805|0 +806|k1611:v1611,k1612:v1612|1710,1711|806,abc806|1 +807|k1613:v1613,k1614:v1614|1712,1713|807,abc807|0 +808|k1615:v1615,k1616:v1616|1714,1715|808,abc808|1 +809|k1617:v1617,k1618:v1618|1716,1717|809,abc809|0 +810|k1619:v1619,k1620:v1620|1718,1719|810,abc810|1 
+811|k1621:v1621,k1622:v1622|1720,1721|811,abc811|0 +812|k1623:v1623,k1624:v1624|1722,1723|812,abc812|1 +813|k1625:v1625,k1626:v1626|1724,1725|813,abc813|0 +814|k1627:v1627,k1628:v1628|1726,1727|814,abc814|1 +815|k1629:v1629,k1630:v1630|1728,1729|815,abc815|0 +816|k1631:v1631,k1632:v1632|1730,1731|816,abc816|1 +817|k1633:v1633,k1634:v1634|1732,1733|817,abc817|0 +818|k1635:v1635,k1636:v1636|1734,1735|818,abc818|1 +819|k1637:v1637,k1638:v1638|1736,1737|819,abc819|0 +820|k1639:v1639,k1640:v1640|1738,1739|820,abc820|1 +821|k1641:v1641,k1642:v1642|1740,1741|821,abc821|0 +822|k1643:v1643,k1644:v1644|1742,1743|822,abc822|1 +823|k1645:v1645,k1646:v1646|1744,1745|823,abc823|0 +824|k1647:v1647,k1648:v1648|1746,1747|824,abc824|1 +825|k1649:v1649,k1650:v1650|1748,1749|825,abc825|0 +826|k1651:v1651,k1652:v1652|1750,1751|826,abc826|1 +827|k1653:v1653,k1654:v1654|1752,1753|827,abc827|0 +828|k1655:v1655,k1656:v1656|1754,1755|828,abc828|1 +829|k1657:v1657,k1658:v1658|1756,1757|829,abc829|0 +830|k1659:v1659,k1660:v1660|1758,1759|830,abc830|1 +831|k1661:v1661,k1662:v1662|1760,1761|831,abc831|0 +832|k1663:v1663,k1664:v1664|1762,1763|832,abc832|1 +833|k1665:v1665,k1666:v1666|1764,1765|833,abc833|0 +834|k1667:v1667,k1668:v1668|1766,1767|834,abc834|1 +835|k1669:v1669,k1670:v1670|1768,1769|835,abc835|0 +836|k1671:v1671,k1672:v1672|1770,1771|836,abc836|1 +837|k1673:v1673,k1674:v1674|1772,1773|837,abc837|0 +838|k1675:v1675,k1676:v1676|1774,1775|838,abc838|1 +839|k1677:v1677,k1678:v1678|1776,1777|839,abc839|0 +840|k1679:v1679,k1680:v1680|1778,1779|840,abc840|1 +841|k1681:v1681,k1682:v1682|1780,1781|841,abc841|0 +842|k1683:v1683,k1684:v1684|1782,1783|842,abc842|1 +843|k1685:v1685,k1686:v1686|1784,1785|843,abc843|0 +844|k1687:v1687,k1688:v1688|1786,1787|844,abc844|1 +845|k1689:v1689,k1690:v1690|1788,1789|845,abc845|0 +846|k1691:v1691,k1692:v1692|1790,1791|846,abc846|1 +847|k1693:v1693,k1694:v1694|1792,1793|847,abc847|0 +848|k1695:v1695,k1696:v1696|1794,1795|848,abc848|1 +849|k1697:v1697,k1698:v1698|1796,1797|849,abc849|0 +850|k1699:v1699,k1700:v1700|1798,1799|850,abc850|1 +851|k1701:v1701,k1702:v1702|1800,1801|851,abc851|0 +852|k1703:v1703,k1704:v1704|1802,1803|852,abc852|1 +853|k1705:v1705,k1706:v1706|1804,1805|853,abc853|0 +854|k1707:v1707,k1708:v1708|1806,1807|854,abc854|1 +855|k1709:v1709,k1710:v1710|1808,1809|855,abc855|0 +856|k1711:v1711,k1712:v1712|1810,1811|856,abc856|1 +857|k1713:v1713,k1714:v1714|1812,1813|857,abc857|0 +858|k1715:v1715,k1716:v1716|1814,1815|858,abc858|1 +859|k1717:v1717,k1718:v1718|1816,1817|859,abc859|0 +860|k1719:v1719,k1720:v1720|1818,1819|860,abc860|1 +861|k1721:v1721,k1722:v1722|1820,1821|861,abc861|0 +862|k1723:v1723,k1724:v1724|1822,1823|862,abc862|1 +863|k1725:v1725,k1726:v1726|1824,1825|863,abc863|0 +864|k1727:v1727,k1728:v1728|1826,1827|864,abc864|1 +865|k1729:v1729,k1730:v1730|1828,1829|865,abc865|0 +866|k1731:v1731,k1732:v1732|1830,1831|866,abc866|1 +867|k1733:v1733,k1734:v1734|1832,1833|867,abc867|0 +868|k1735:v1735,k1736:v1736|1834,1835|868,abc868|1 +869|k1737:v1737,k1738:v1738|1836,1837|869,abc869|0 +870|k1739:v1739,k1740:v1740|1838,1839|870,abc870|1 +871|k1741:v1741,k1742:v1742|1840,1841|871,abc871|0 +872|k1743:v1743,k1744:v1744|1842,1843|872,abc872|1 +873|k1745:v1745,k1746:v1746|1844,1845|873,abc873|0 +874|k1747:v1747,k1748:v1748|1846,1847|874,abc874|1 +875|k1749:v1749,k1750:v1750|1848,1849|875,abc875|0 +876|k1751:v1751,k1752:v1752|1850,1851|876,abc876|1 +877|k1753:v1753,k1754:v1754|1852,1853|877,abc877|0 +878|k1755:v1755,k1756:v1756|1854,1855|878,abc878|1 
+879|k1757:v1757,k1758:v1758|1856,1857|879,abc879|0 +880|k1759:v1759,k1760:v1760|1858,1859|880,abc880|1 +881|k1761:v1761,k1762:v1762|1860,1861|881,abc881|0 +882|k1763:v1763,k1764:v1764|1862,1863|882,abc882|1 +883|k1765:v1765,k1766:v1766|1864,1865|883,abc883|0 +884|k1767:v1767,k1768:v1768|1866,1867|884,abc884|1 +885|k1769:v1769,k1770:v1770|1868,1869|885,abc885|0 +886|k1771:v1771,k1772:v1772|1870,1871|886,abc886|1 +887|k1773:v1773,k1774:v1774|1872,1873|887,abc887|0 +888|k1775:v1775,k1776:v1776|1874,1875|888,abc888|1 +889|k1777:v1777,k1778:v1778|1876,1877|889,abc889|0 +890|k1779:v1779,k1780:v1780|1878,1879|890,abc890|1 +891|k1781:v1781,k1782:v1782|1880,1881|891,abc891|0 +892|k1783:v1783,k1784:v1784|1882,1883|892,abc892|1 +893|k1785:v1785,k1786:v1786|1884,1885|893,abc893|0 +894|k1787:v1787,k1788:v1788|1886,1887|894,abc894|1 +895|k1789:v1789,k1790:v1790|1888,1889|895,abc895|0 +896|k1791:v1791,k1792:v1792|1890,1891|896,abc896|1 +897|k1793:v1793,k1794:v1794|1892,1893|897,abc897|0 +898|k1795:v1795,k1796:v1796|1894,1895|898,abc898|1 +899|k1797:v1797,k1798:v1798|1896,1897|899,abc899|0 +900|k1799:v1799,k1800:v1800|1898,1899|900,abc900|1 +901|k1801:v1801,k1802:v1802|1900,1901|901,abc901|0 +902|k1803:v1803,k1804:v1804|1902,1903|902,abc902|1 +903|k1805:v1805,k1806:v1806|1904,1905|903,abc903|0 +904|k1807:v1807,k1808:v1808|1906,1907|904,abc904|1 +905|k1809:v1809,k1810:v1810|1908,1909|905,abc905|0 +906|k1811:v1811,k1812:v1812|1910,1911|906,abc906|1 +907|k1813:v1813,k1814:v1814|1912,1913|907,abc907|0 +908|k1815:v1815,k1816:v1816|1914,1915|908,abc908|1 +909|k1817:v1817,k1818:v1818|1916,1917|909,abc909|0 +910|k1819:v1819,k1820:v1820|1918,1919|910,abc910|1 +911|k1821:v1821,k1822:v1822|1920,1921|911,abc911|0 +912|k1823:v1823,k1824:v1824|1922,1923|912,abc912|1 +913|k1825:v1825,k1826:v1826|1924,1925|913,abc913|0 +914|k1827:v1827,k1828:v1828|1926,1927|914,abc914|1 +915|k1829:v1829,k1830:v1830|1928,1929|915,abc915|0 +916|k1831:v1831,k1832:v1832|1930,1931|916,abc916|1 +917|k1833:v1833,k1834:v1834|1932,1933|917,abc917|0 +918|k1835:v1835,k1836:v1836|1934,1935|918,abc918|1 +919|k1837:v1837,k1838:v1838|1936,1937|919,abc919|0 +920|k1839:v1839,k1840:v1840|1938,1939|920,abc920|1 +921|k1841:v1841,k1842:v1842|1940,1941|921,abc921|0 +922|k1843:v1843,k1844:v1844|1942,1943|922,abc922|1 +923|k1845:v1845,k1846:v1846|1944,1945|923,abc923|0 +924|k1847:v1847,k1848:v1848|1946,1947|924,abc924|1 +925|k1849:v1849,k1850:v1850|1948,1949|925,abc925|0 +926|k1851:v1851,k1852:v1852|1950,1951|926,abc926|1 +927|k1853:v1853,k1854:v1854|1952,1953|927,abc927|0 +928|k1855:v1855,k1856:v1856|1954,1955|928,abc928|1 +929|k1857:v1857,k1858:v1858|1956,1957|929,abc929|0 +930|k1859:v1859,k1860:v1860|1958,1959|930,abc930|1 +931|k1861:v1861,k1862:v1862|1960,1961|931,abc931|0 +932|k1863:v1863,k1864:v1864|1962,1963|932,abc932|1 +933|k1865:v1865,k1866:v1866|1964,1965|933,abc933|0 +934|k1867:v1867,k1868:v1868|1966,1967|934,abc934|1 +935|k1869:v1869,k1870:v1870|1968,1969|935,abc935|0 +936|k1871:v1871,k1872:v1872|1970,1971|936,abc936|1 +937|k1873:v1873,k1874:v1874|1972,1973|937,abc937|0 +938|k1875:v1875,k1876:v1876|1974,1975|938,abc938|1 +939|k1877:v1877,k1878:v1878|1976,1977|939,abc939|0 +940|k1879:v1879,k1880:v1880|1978,1979|940,abc940|1 +941|k1881:v1881,k1882:v1882|1980,1981|941,abc941|0 +942|k1883:v1883,k1884:v1884|1982,1983|942,abc942|1 +943|k1885:v1885,k1886:v1886|1984,1985|943,abc943|0 +944|k1887:v1887,k1888:v1888|1986,1987|944,abc944|1 +945|k1889:v1889,k1890:v1890|1988,1989|945,abc945|0 +946|k1891:v1891,k1892:v1892|1990,1991|946,abc946|1 
+947|k1893:v1893,k1894:v1894|1992,1993|947,abc947|0 +948|k1895:v1895,k1896:v1896|1994,1995|948,abc948|1 +949|k1897:v1897,k1898:v1898|1996,1997|949,abc949|0 +950|k1899:v1899,k1900:v1900|1998,1999|950,abc950|1 +951|k1901:v1901,k1902:v1902|2000,2001|951,abc951|0 +952|k1903:v1903,k1904:v1904|2002,2003|952,abc952|1 +953|k1905:v1905,k1906:v1906|2004,2005|953,abc953|0 +954|k1907:v1907,k1908:v1908|2006,2007|954,abc954|1 +955|k1909:v1909,k1910:v1910|2008,2009|955,abc955|0 +956|k1911:v1911,k1912:v1912|2010,2011|956,abc956|1 +957|k1913:v1913,k1914:v1914|2012,2013|957,abc957|0 +958|k1915:v1915,k1916:v1916|2014,2015|958,abc958|1 +959|k1917:v1917,k1918:v1918|2016,2017|959,abc959|0 +960|k1919:v1919,k1920:v1920|2018,2019|960,abc960|1 +961|k1921:v1921,k1922:v1922|2020,2021|961,abc961|0 +962|k1923:v1923,k1924:v1924|2022,2023|962,abc962|1 +963|k1925:v1925,k1926:v1926|2024,2025|963,abc963|0 +964|k1927:v1927,k1928:v1928|2026,2027|964,abc964|1 +965|k1929:v1929,k1930:v1930|2028,2029|965,abc965|0 +966|k1931:v1931,k1932:v1932|2030,2031|966,abc966|1 +967|k1933:v1933,k1934:v1934|2032,2033|967,abc967|0 +968|k1935:v1935,k1936:v1936|2034,2035|968,abc968|1 +969|k1937:v1937,k1938:v1938|2036,2037|969,abc969|0 +970|k1939:v1939,k1940:v1940|2038,2039|970,abc970|1 +971|k1941:v1941,k1942:v1942|2040,2041|971,abc971|0 +972|k1943:v1943,k1944:v1944|2042,2043|972,abc972|1 +973|k1945:v1945,k1946:v1946|2044,2045|973,abc973|0 +974|k1947:v1947,k1948:v1948|2046,2047|974,abc974|1 +975|k1949:v1949,k1950:v1950|2048,2049|975,abc975|0 +976|k1951:v1951,k1952:v1952|2050,2051|976,abc976|1 +977|k1953:v1953,k1954:v1954|2052,2053|977,abc977|0 +978|k1955:v1955,k1956:v1956|2054,2055|978,abc978|1 +979|k1957:v1957,k1958:v1958|2056,2057|979,abc979|0 +980|k1959:v1959,k1960:v1960|2058,2059|980,abc980|1 +981|k1961:v1961,k1962:v1962|2060,2061|981,abc981|0 +982|k1963:v1963,k1964:v1964|2062,2063|982,abc982|1 +983|k1965:v1965,k1966:v1966|2064,2065|983,abc983|0 +984|k1967:v1967,k1968:v1968|2066,2067|984,abc984|1 +985|k1969:v1969,k1970:v1970|2068,2069|985,abc985|0 +986|k1971:v1971,k1972:v1972|2070,2071|986,abc986|1 +987|k1973:v1973,k1974:v1974|2072,2073|987,abc987|0 +988|k1975:v1975,k1976:v1976|2074,2075|988,abc988|1 +989|k1977:v1977,k1978:v1978|2076,2077|989,abc989|0 +990|k1979:v1979,k1980:v1980|2078,2079|990,abc990|1 +991|k1981:v1981,k1982:v1982|2080,2081|991,abc991|0 +992|k1983:v1983,k1984:v1984|2082,2083|992,abc992|1 +993|k1985:v1985,k1986:v1986|2084,2085|993,abc993|0 +994|k1987:v1987,k1988:v1988|2086,2087|994,abc994|1 +995|k1989:v1989,k1990:v1990|2088,2089|995,abc995|0 +996|k1991:v1991,k1992:v1992|2090,2091|996,abc996|1 +997|k1993:v1993,k1994:v1994|2092,2093|997,abc997|0 +998|k1995:v1995,k1996:v1996|2094,2095|998,abc998|1 +999|k1997:v1997,k1998:v1998|2096,2097|999,abc999|0 +1000|k1999:v1999,k2000:v2000|2098,2099|1000,abc1000|1 +1001|k2001:v2001,k2002:v2002|2100,2101|1001,abc1001|0 +1002|k2003:v2003,k2004:v2004|2102,2103|1002,abc1002|1 +1003|k2005:v2005,k2006:v2006|2104,2105|1003,abc1003|0 +1004|k2007:v2007,k2008:v2008|2106,2107|1004,abc1004|1 +1005|k2009:v2009,k2010:v2010|2108,2109|1005,abc1005|0 +1006|k2011:v2011,k2012:v2012|2110,2111|1006,abc1006|1 +1007|k2013:v2013,k2014:v2014|2112,2113|1007,abc1007|0 +1008|k2015:v2015,k2016:v2016|2114,2115|1008,abc1008|1 +1009|k2017:v2017,k2018:v2018|2116,2117|1009,abc1009|0 +1010|k2019:v2019,k2020:v2020|2118,2119|1010,abc1010|1 +1011|k2021:v2021,k2022:v2022|2120,2121|1011,abc1011|0 +1012|k2023:v2023,k2024:v2024|2122,2123|1012,abc1012|1 +1013|k2025:v2025,k2026:v2026|2124,2125|1013,abc1013|0 
+1014|k2027:v2027,k2028:v2028|2126,2127|1014,abc1014|1 +1015|k2029:v2029,k2030:v2030|2128,2129|1015,abc1015|0 +1016|k2031:v2031,k2032:v2032|2130,2131|1016,abc1016|1 +1017|k2033:v2033,k2034:v2034|2132,2133|1017,abc1017|0 +1018|k2035:v2035,k2036:v2036|2134,2135|1018,abc1018|1 +1019|k2037:v2037,k2038:v2038|2136,2137|1019,abc1019|0 +1020|k2039:v2039,k2040:v2040|2138,2139|1020,abc1020|1 +1021|k2041:v2041,k2042:v2042|2140,2141|1021,abc1021|0 +1022|k2043:v2043,k2044:v2044|2142,2143|1022,abc1022|1 +1023|k2045:v2045,k2046:v2046|2144,2145|1023,abc1023|0 +1024|k2047:v2047,k2048:v2048|2146,2147|1024,abc1024|1 +1025|k2049:v2049,k2050:v2050|2148,2149|1025,abc1025|0 \ No newline at end of file diff --git a/itests/src/test/resources/testconfiguration.properties b/itests/src/test/resources/testconfiguration.properties index ab3cd43158..17e1994ceb 100644 --- a/itests/src/test/resources/testconfiguration.properties +++ b/itests/src/test/resources/testconfiguration.properties @@ -78,6 +78,7 @@ minillap.shared.query.files=insert_into1.q,\ orc_merge_diff_fs.q,\ parallel_colstats.q,\ parquet_types_vectorization.q,\ + parquet_complex_types_vectorization.q,\ union_type_chk.q,\ cte_2.q,\ cte_4.q,\ diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExpressionDescriptor.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExpressionDescriptor.java index f7e3ff30ea..7a434b367c 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExpressionDescriptor.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExpressionDescriptor.java @@ -78,13 +78,14 @@ BINARY (0x400), STRUCT (0x800), DECIMAL_64 (0x1000), + LIST (0X2000), INT_DECIMAL_64_FAMILY (INT_FAMILY.value | DECIMAL_64.value), DATETIME_FAMILY (DATE.value | TIMESTAMP.value), INTERVAL_FAMILY (INTERVAL_YEAR_MONTH.value | INTERVAL_DAY_TIME.value), INT_INTERVAL_YEAR_MONTH (INT_FAMILY.value | INTERVAL_YEAR_MONTH.value), INT_DATE_INTERVAL_YEAR_MONTH (INT_FAMILY.value | DATE.value | INTERVAL_YEAR_MONTH.value), STRING_DATETIME_FAMILY (STRING_FAMILY.value | DATETIME_FAMILY.value), - ALL_FAMILY (0xFFF); + ALL_FAMILY (0xFFFF); private final int value; @@ -127,6 +128,8 @@ public static ArgumentType fromHiveTypeName(String hiveTypeName) { return INTERVAL_DAY_TIME; } else if (VectorizationContext.structTypePattern.matcher(lower).matches()) { return STRUCT; + } else if (VectorizationContext.listTypePattern.matcher(lower).matches()) { + return LIST; } else if (lower.equals("void")) { // The old code let void through... 
return INT_FAMILY; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java index 13eff51ce3..096bfe2e3e 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java @@ -423,6 +423,9 @@ public DataTypePhysicalVariation getDataTypePhysicalVariation(int columnNum) thr public static final Pattern structTypePattern = Pattern.compile("struct.*", Pattern.CASE_INSENSITIVE); + public static final Pattern listTypePattern = Pattern.compile("array.*", + Pattern.CASE_INSENSITIVE); + //Map column number to type private OutputColumnManager ocm; @@ -3296,6 +3299,8 @@ static String getUndecoratedName(String hiveTypeName) throws HiveException { return hiveTypeName; case STRUCT: return "Struct"; + case LIST: + return "List"; default: throw new HiveException("Unexpected hive type name " + hiveTypeName); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ListIndexColColumn.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ListIndexColColumn.java new file mode 100644 index 0000000000..eaa0b0b0b2 --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ListIndexColColumn.java @@ -0,0 +1,122 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.exec.vector.expressions; + +import org.apache.hadoop.hive.ql.exec.vector.ColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.ListColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; +import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; + +/** + * Vectorized instruction to get an element from a list with the index from another column and put + * the result in an output column. 
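+ * + * For example, with the list column holding [100, 101] for a row and the index column holding 1, + * the expression projects 101 into the output column for that row; an index at or beyond the + * list length marks that output entry as NULL.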
+ */ +public class ListIndexColColumn extends VectorExpression { + private static final long serialVersionUID = 1L; + + private int listColumnNum; + private int indexColumnNum; + + public ListIndexColColumn() { + super(); + } + + public ListIndexColColumn(int listColumnNum, int indexColumnNum, int outputColumnNum) { + super(outputColumnNum); + this.listColumnNum = listColumnNum; + this.indexColumnNum = indexColumnNum; + } + + @Override + public void evaluate(VectorizedRowBatch batch) { + if (childExpressions != null) { + super.evaluateChildren(batch); + } + + ColumnVector outV = batch.cols[outputColumnNum]; + ListColumnVector listV = (ListColumnVector) batch.cols[listColumnNum]; + ColumnVector childV = listV.child; + LongColumnVector indexColumnVector = (LongColumnVector) batch.cols[indexColumnNum]; + long[] indexV = indexColumnVector.vector; + + outV.noNulls = true; + if (listV.isRepeating) { + if (listV.isNull[0]) { + outV.isNull[0] = true; + outV.noNulls = false; + outV.isRepeating = true; + } else { + if (indexColumnVector.isRepeating) { + if (indexV[0] >= listV.lengths[0]) { + outV.isNull[0] = true; + outV.noNulls = false; + } else { + outV.setElement(0, (int) (listV.offsets[0] + indexV[0]), childV); + outV.isNull[0] = false; + } + outV.isRepeating = true; + } else { + for (int i = 0; i < batch.size; i++) { + int j = (batch.selectedInUse) ? batch.selected[i] : i; + if (indexV[j] >= listV.lengths[0]) { + outV.isNull[j] = true; + outV.noNulls = false; + } else { + outV.setElement(j, (int) (listV.offsets[0] + indexV[j]), childV); + outV.isNull[j] = false; + } + } + outV.isRepeating = false; + } + } + } else { + for (int i = 0; i < batch.size; i++) { + int j = (batch.selectedInUse) ? batch.selected[i] : i; + if (listV.isNull[j] || indexV[j] >= listV.lengths[j]) { + outV.isNull[j] = true; + outV.noNulls = false; + } else { + outV.setElement(j, (int) (listV.offsets[j] + indexV[j]), childV); + outV.isNull[j] = false; + } + } + outV.isRepeating = false; + } + } + + @Override + public String vectorExpressionParameters() { + return getColumnParamString(0, listColumnNum) + ", " + getColumnParamString(1, indexColumnNum); + } + + @Override + public VectorExpressionDescriptor.Descriptor getDescriptor() { + return (new VectorExpressionDescriptor.Builder()) + .setMode( + VectorExpressionDescriptor.Mode.PROJECTION) + .setNumArguments(2) + .setArgumentTypes( + VectorExpressionDescriptor.ArgumentType.LIST, + VectorExpressionDescriptor.ArgumentType.INT_FAMILY) + .setInputExpressionTypes( + VectorExpressionDescriptor.InputExpressionType.COLUMN, + VectorExpressionDescriptor.InputExpressionType.COLUMN).build(); + } +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ListIndexColScalar.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ListIndexColScalar.java new file mode 100644 index 0000000000..7231606e42 --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ListIndexColScalar.java @@ -0,0 +1,104 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.exec.vector.expressions; + +import org.apache.hadoop.hive.ql.exec.vector.ColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.ListColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; +import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; + +/** + * Vectorized instruction to get an element from a list with a scalar index and put + * the result in an output column. + */ +public class ListIndexColScalar extends VectorExpression { + private static final long serialVersionUID = 1L; + + private int listColumnNum; + private int index; + + public ListIndexColScalar() { + super(); + } + + public ListIndexColScalar(int listColumn, int index, int outputColumnNum) { + super(outputColumnNum); + this.listColumnNum = listColumn; + this.index = index; + } + + @Override + public void evaluate(VectorizedRowBatch batch) { + if (childExpressions != null) { + super.evaluateChildren(batch); + } + + ColumnVector outV = batch.cols[outputColumnNum]; + ListColumnVector listV = (ListColumnVector) batch.cols[listColumnNum]; + ColumnVector childV = listV.child; + + outV.noNulls = true; + if (listV.isRepeating) { + if (listV.isNull[0]) { + outV.isNull[0] = true; + outV.noNulls = false; + } else { + if (index >= listV.lengths[0]) { + outV.isNull[0] = true; + outV.noNulls = false; + } else { + outV.setElement(0, (int) (listV.offsets[0] + index), childV); + outV.isNull[0] = false; + } + } + outV.isRepeating = true; + } else { + for (int i = 0; i < batch.size; i++) { + int j = (batch.selectedInUse) ? 
batch.selected[i] : i; + if (listV.isNull[j] || index >= listV.lengths[j]) { + outV.isNull[j] = true; + outV.noNulls = false; + } else { + outV.setElement(j, (int) (listV.offsets[j] + index), childV); + outV.isNull[j] = false; + } + } + outV.isRepeating = false; + } + } + + @Override + public String vectorExpressionParameters() { + return getColumnParamString(0, listColumnNum) + ", " + getColumnParamString(1, index); + } + + @Override + public VectorExpressionDescriptor.Descriptor getDescriptor() { + return (new VectorExpressionDescriptor.Builder()) + .setMode( + VectorExpressionDescriptor.Mode.PROJECTION) + .setNumArguments(2) + .setArgumentTypes( + VectorExpressionDescriptor.ArgumentType.LIST, + VectorExpressionDescriptor.ArgumentType.INT_FAMILY) + .setInputExpressionTypes( + VectorExpressionDescriptor.InputExpressionType.COLUMN, + VectorExpressionDescriptor.InputExpressionType.SCALAR).build(); + } +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriterFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriterFactory.java index d8df5cc9be..8733064c5b 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriterFactory.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriterFactory.java @@ -62,6 +62,7 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableStringObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.VoidObjectInspector; import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo; +import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils; import org.apache.hadoop.io.Text; @@ -1403,11 +1404,14 @@ private static VectorExpressionWriter genVectorExpressionWritableList( SettableListObjectInspector fieldObjInspector) throws HiveException { return new VectorExpressionWriterList() { - private Object obj; + private VectorExtractRow vectorExtractRow; + private ListTypeInfo listTypeInfo; public VectorExpressionWriter init(SettableListObjectInspector objInspector) throws HiveException { super.init(objInspector); - obj = initValue(null); + vectorExtractRow = new VectorExtractRow(); + listTypeInfo = (ListTypeInfo) + TypeInfoUtils.getTypeInfoFromTypeString(objInspector.getTypeName()); return this; } @@ -1419,14 +1423,26 @@ public Object initValue(Object ignored) { @Override public Object writeValue(ColumnVector column, int row) throws HiveException { - throw new HiveException("Not implemented yet"); + return setValue(null, column, row); } @Override public Object setValue(Object row, ColumnVector column, int columnRow) throws HiveException { - throw new HiveException("Not implemented yet"); + final ListColumnVector listColVector = (ListColumnVector) column; + final SettableListObjectInspector listOI = + (SettableListObjectInspector) this.objectInspector; + final List value = (List)vectorExtractRow.extractRowColumn(listColVector, + listTypeInfo, listOI, columnRow); + if (null == row) { + row = ((SettableListObjectInspector) this.objectInspector).create(value.size()); + } + for (int i = 0; i < value.size(); i++) { + listOI.set(row, i, value.get(i)); + } + return row; } + }.init(fieldObjInspector); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFIndex.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFIndex.java index 
bdb2361b3b..3db96eca1a 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFIndex.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFIndex.java @@ -22,6 +22,9 @@ import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException; import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; +import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions; +import org.apache.hadoop.hive.ql.exec.vector.expressions.ListIndexColColumn; +import org.apache.hadoop.hive.ql.exec.vector.expressions.ListIndexColScalar; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector; @@ -37,6 +40,7 @@ * */ @Description(name = "index", value = "_FUNC_(a, n) - Returns the n-th element of a ") +@VectorizedExpressions({ListIndexColScalar.class, ListIndexColColumn.class}) public class GenericUDFIndex extends GenericUDF { private transient MapObjectInspector mapOI; diff --git a/ql/src/test/queries/clientpositive/parquet_complex_types_vectorization.q b/ql/src/test/queries/clientpositive/parquet_complex_types_vectorization.q new file mode 100644 index 0000000000..afca8be961 --- /dev/null +++ b/ql/src/test/queries/clientpositive/parquet_complex_types_vectorization.q @@ -0,0 +1,65 @@ +set hive.mapred.mode=nonstrict; +set hive.vectorized.execution.enabled=true; +set hive.fetch.task.conversion=none; + +DROP TABLE parquet_complex_types_staging; +DROP TABLE parquet_complex_types; + +CREATE TABLE parquet_complex_types_staging ( +id int, +m1 map, +l1 array, +st1 struct, +listIndex int +) ROW FORMAT DELIMITED +FIELDS TERMINATED BY '|' +COLLECTION ITEMS TERMINATED BY ',' +MAP KEYS TERMINATED BY ':'; + +CREATE TABLE parquet_complex_types ( +id int, +m1 map, +l1 array, +st1 struct, +listIndex int +) STORED AS PARQUET; + +-- test data size < 1024 +LOAD DATA LOCAL INPATH '../../data/files/parquet_complex_types.txt' OVERWRITE INTO TABLE parquet_complex_types_staging; +INSERT OVERWRITE TABLE parquet_complex_types +SELECT id, m1, l1, st1, listIndex FROM parquet_complex_types_staging where id < 1024; + +-- verify the row number +select count(*) from parquet_complex_types; +-- test element select with constant and variable +explain vectorization expression select l1, l1[0], l1[1], l1[listIndex], listIndex from parquet_complex_types limit 10; +select l1, l1[0], l1[1], l1[listIndex], listIndex from parquet_complex_types limit 10; +-- test complex select with list +explain vectorization expression select sum(l1[0]), l1[1] from parquet_complex_types where l1[0] > 1000 group by l1[1] order by l1[1] limit 10; +select sum(l1[0]), l1[1] from parquet_complex_types where l1[0] > 1000 group by l1[1] order by l1[1] desc limit 10; + +-- test data size = 1024 +INSERT OVERWRITE TABLE parquet_complex_types +SELECT id, m1, l1, st1, listIndex FROM parquet_complex_types_staging where id < 1025; + +-- verify the row number +select count(*) from parquet_complex_types; +-- test element select with constant and variable +explain vectorization expression select l1, l1[0], l1[1], l1[listIndex], listIndex from parquet_complex_types limit 10; +select l1, l1[0], l1[1], l1[listIndex], listIndex from parquet_complex_types limit 10; +-- test complex select with list +explain vectorization expression select sum(l1[0]), l1[1] from parquet_complex_types where l1[0] > 1000 group by l1[1] order by l1[1] limit 10; +select 
sum(l1[0]), l1[1] from parquet_complex_types where l1[0] > 1000 group by l1[1] order by l1[1] desc limit 10; + +-- test data size = 1025 +INSERT OVERWRITE TABLE parquet_complex_types +SELECT id, m1, l1, st1, listIndex FROM parquet_complex_types_staging; + +-- verify the row number +select count(*) from parquet_complex_types; +-- test element select with constant and variable +explain vectorization expression select l1, l1[0], l1[1], l1[listIndex], listIndex from parquet_complex_types limit 10; +select l1, l1[0], l1[1], l1[listIndex], listIndex from parquet_complex_types limit 10; +-- test complex select with list +explain vectorization expression select sum(l1[0]), l1[1] from parquet_complex_types where l1[0] > 1000 group by l1[1] order by l1[1] limit 10; +select sum(l1[0]), l1[1] from parquet_complex_types where l1[0] > 1000 group by l1[1] order by l1[1] desc limit 10; diff --git a/ql/src/test/queries/clientpositive/vector_complex_join.q b/ql/src/test/queries/clientpositive/vector_complex_join.q index 3c70d9b3e9..db407bcc62 100644 --- a/ql/src/test/queries/clientpositive/vector_complex_join.q +++ b/ql/src/test/queries/clientpositive/vector_complex_join.q @@ -5,7 +5,7 @@ SET hive.vectorized.execution.enabled=true; SET hive.auto.convert.join=true; set hive.fetch.task.conversion=none; --- From HIVE-10729. Not expected to vectorize this query. +-- The test case is updated for HIVE-18043. -- CREATE TABLE test (a INT, b MAP) STORED AS ORC; INSERT OVERWRITE TABLE test SELECT 199408978, MAP(1, "val_1", 2, "val_2") FROM src LIMIT 1; @@ -17,8 +17,8 @@ select * from alltypesorc join test where alltypesorc.cint=test.a; -CREATE TABLE test2a (a ARRAY) STORED AS ORC; -INSERT OVERWRITE TABLE test2a SELECT ARRAY(1, 2) FROM src LIMIT 1; +CREATE TABLE test2a (a ARRAY, index INT) STORED AS ORC; +INSERT OVERWRITE TABLE test2a SELECT ARRAY(1, 2), 1 FROM src LIMIT 1; CREATE TABLE test2b (a INT) STORED AS ORC; INSERT OVERWRITE TABLE test2b VALUES (2), (3), (4); @@ -28,3 +28,8 @@ explain vectorization expression select * from test2b join test2a on test2b.a = test2a.a[1]; select * from test2b join test2a on test2b.a = test2a.a[1]; + +explain vectorization expression +select * from test2b join test2a on test2b.a = test2a.a[test2a.index]; + +select * from test2b join test2a on test2b.a = test2a.a[test2a.index]; \ No newline at end of file diff --git a/ql/src/test/results/clientpositive/llap/parquet_complex_types_vectorization.q.out b/ql/src/test/results/clientpositive/llap/parquet_complex_types_vectorization.q.out new file mode 100644 index 0000000000..cedbb3d4d7 --- /dev/null +++ b/ql/src/test/results/clientpositive/llap/parquet_complex_types_vectorization.q.out @@ -0,0 +1,926 @@ +PREHOOK: query: DROP TABLE parquet_complex_types_staging +PREHOOK: type: DROPTABLE +POSTHOOK: query: DROP TABLE parquet_complex_types_staging +POSTHOOK: type: DROPTABLE +PREHOOK: query: DROP TABLE parquet_complex_types +PREHOOK: type: DROPTABLE +POSTHOOK: query: DROP TABLE parquet_complex_types +POSTHOOK: type: DROPTABLE +PREHOOK: query: CREATE TABLE parquet_complex_types_staging ( +id int, +m1 map, +l1 array, +st1 struct, +listIndex int +) ROW FORMAT DELIMITED +FIELDS TERMINATED BY '|' +COLLECTION ITEMS TERMINATED BY ',' +MAP KEYS TERMINATED BY ':' +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@parquet_complex_types_staging +POSTHOOK: query: CREATE TABLE parquet_complex_types_staging ( +id int, +m1 map, +l1 array, +st1 struct, +listIndex int +) ROW FORMAT DELIMITED +FIELDS TERMINATED BY '|' 
+COLLECTION ITEMS TERMINATED BY ',' +MAP KEYS TERMINATED BY ':' +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@parquet_complex_types_staging +PREHOOK: query: CREATE TABLE parquet_complex_types ( +id int, +m1 map, +l1 array, +st1 struct, +listIndex int +) STORED AS PARQUET +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@parquet_complex_types +POSTHOOK: query: CREATE TABLE parquet_complex_types ( +id int, +m1 map, +l1 array, +st1 struct, +listIndex int +) STORED AS PARQUET +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@parquet_complex_types +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/parquet_complex_types.txt' OVERWRITE INTO TABLE parquet_complex_types_staging +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@parquet_complex_types_staging +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/parquet_complex_types.txt' OVERWRITE INTO TABLE parquet_complex_types_staging +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@parquet_complex_types_staging +PREHOOK: query: INSERT OVERWRITE TABLE parquet_complex_types +SELECT id, m1, l1, st1, listIndex FROM parquet_complex_types_staging where id < 1024 +PREHOOK: type: QUERY +PREHOOK: Input: default@parquet_complex_types_staging +PREHOOK: Output: default@parquet_complex_types +POSTHOOK: query: INSERT OVERWRITE TABLE parquet_complex_types +SELECT id, m1, l1, st1, listIndex FROM parquet_complex_types_staging where id < 1024 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@parquet_complex_types_staging +POSTHOOK: Output: default@parquet_complex_types +POSTHOOK: Lineage: parquet_complex_types.id SIMPLE [(parquet_complex_types_staging)parquet_complex_types_staging.FieldSchema(name:id, type:int, comment:null), ] +POSTHOOK: Lineage: parquet_complex_types.l1 SIMPLE [(parquet_complex_types_staging)parquet_complex_types_staging.FieldSchema(name:l1, type:array, comment:null), ] +POSTHOOK: Lineage: parquet_complex_types.listindex SIMPLE [(parquet_complex_types_staging)parquet_complex_types_staging.FieldSchema(name:listindex, type:int, comment:null), ] +POSTHOOK: Lineage: parquet_complex_types.m1 SIMPLE [(parquet_complex_types_staging)parquet_complex_types_staging.FieldSchema(name:m1, type:map, comment:null), ] +POSTHOOK: Lineage: parquet_complex_types.st1 SIMPLE [(parquet_complex_types_staging)parquet_complex_types_staging.FieldSchema(name:st1, type:struct, comment:null), ] +PREHOOK: query: select count(*) from parquet_complex_types +PREHOOK: type: QUERY +PREHOOK: Input: default@parquet_complex_types +#### A masked pattern was here #### +POSTHOOK: query: select count(*) from parquet_complex_types +POSTHOOK: type: QUERY +POSTHOOK: Input: default@parquet_complex_types +#### A masked pattern was here #### +1023 +PREHOOK: query: explain vectorization expression select l1, l1[0], l1[1], l1[listIndex], listIndex from parquet_complex_types limit 10 +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization expression select l1, l1[0], l1[1], l1[listIndex], listIndex from parquet_complex_types limit 10 +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: 
parquet_complex_types + Statistics: Num rows: 1023 Data size: 120652 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Select Operator + expressions: l1 (type: array), l1[0] (type: int), l1[1] (type: int), l1[listindex] (type: int), listindex (type: int) + outputColumnNames: _col0, _col1, _col2, _col3, _col4 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [2, 6, 7, 8, 4] + selectExpressions: ListIndexColScalar(col 2:array, col 0:int) -> 6:int, ListIndexColScalar(col 2:array, col 1:int) -> 7:int, ListIndexColColumn(col 2:array, col 4:int) -> 8:int + Statistics: Num rows: 1023 Data size: 120652 Basic stats: COMPLETE Column stats: NONE + Limit + Number of rows: 10 + Limit Vectorization: + className: VectorLimitOperator + native: true + Statistics: Num rows: 10 Data size: 1170 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 10 Data size: 1170 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized, llap + LLAP IO: all inputs (cache only) + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [] + featureSupportInUse: [] + inputFileFormats: org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + + Stage: Stage-0 + Fetch Operator + limit: 10 + Processor Tree: + ListSink + +PREHOOK: query: select l1, l1[0], l1[1], l1[listIndex], listIndex from parquet_complex_types limit 10 +PREHOOK: type: QUERY +PREHOOK: Input: default@parquet_complex_types +#### A masked pattern was here #### +POSTHOOK: query: select l1, l1[0], l1[1], l1[listIndex], listIndex from parquet_complex_types limit 10 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@parquet_complex_types +#### A masked pattern was here #### +[100,101] 100 101 100 0 +[102,103] 102 103 103 1 +[104,105] 104 105 104 0 +[106,107] 106 107 107 1 +[108,109] 108 109 108 0 +[110,111] 110 111 111 1 +[112,113] 112 113 112 0 +[114,115] 114 115 115 1 +[116,117] 116 117 116 0 +[118,119] 118 119 119 1 +PREHOOK: query: explain vectorization expression select sum(l1[0]), l1[1] from parquet_complex_types where l1[0] > 1000 group by l1[1] order by l1[1] limit 10 +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization expression select sum(l1[0]), l1[1] from parquet_complex_types where l1[0] > 1000 group by l1[1] order by l1[1] limit 10 +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) + Reducer 3 <- Reducer 2 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: parquet_complex_types + Statistics: Num rows: 1023 Data size: 116760 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Filter Operator + Filter Vectorization: + className: VectorFilterOperator + native: true + predicateExpression: 
FilterLongColGreaterLongScalar(col 6:int, val 1000)(children: ListIndexColScalar(col 2:array, col 0:int) -> 6:int) + predicate: (l1[0] > 1000) (type: boolean) + Statistics: Num rows: 341 Data size: 38920 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: l1[1] (type: int), l1[0] (type: int) + outputColumnNames: _col0, _col1 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [6, 7] + selectExpressions: ListIndexColScalar(col 2:array, col 1:int) -> 6:int, ListIndexColScalar(col 2:array, col 0:int) -> 7:int + Statistics: Num rows: 341 Data size: 38920 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: sum(_col1) + Group By Vectorization: + aggregators: VectorUDAFSumLong(col 7:int) -> bigint + className: VectorGroupByOperator + groupByMode: HASH + keyExpressions: col 6:int + native: false + vectorProcessingMode: HASH + projectedOutputColumnNums: [0] + keys: _col0 (type: int) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 341 Data size: 38920 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 341 Data size: 38920 Basic stats: COMPLETE Column stats: NONE + TopN Hash Memory Usage: 0.1 + value expressions: _col1 (type: bigint) + Execution mode: vectorized, llap + LLAP IO: all inputs (cache only) + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [] + featureSupportInUse: [] + inputFileFormats: org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reducer 2 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Operator Tree: + Group By Operator + aggregations: sum(VALUE._col0) + Group By Vectorization: + aggregators: VectorUDAFSumLong(col 1:bigint) -> bigint + className: VectorGroupByOperator + groupByMode: MERGEPARTIAL + keyExpressions: col 0:int + native: false + vectorProcessingMode: MERGE_PARTIAL + projectedOutputColumnNums: [0] + keys: KEY._col0 (type: int) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 170 Data size: 19402 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: _col1 (type: bigint), _col0 (type: int) + outputColumnNames: _col0, _col2 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [1, 0] + Statistics: Num rows: 170 Data size: 19402 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col2 (type: int) + sort order: + + Reduce Sink Vectorization: + className: VectorReduceSinkObjectHashOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No 
DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 170 Data size: 19402 Basic stats: COMPLETE Column stats: NONE + TopN Hash Memory Usage: 0.1 + value expressions: _col0 (type: bigint) + Reducer 3 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Operator Tree: + Select Operator + expressions: VALUE._col0 (type: bigint), KEY.reducesinkkey0 (type: int) + outputColumnNames: _col0, _col1 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [1, 0] + Statistics: Num rows: 170 Data size: 19402 Basic stats: COMPLETE Column stats: NONE + Limit + Number of rows: 10 + Limit Vectorization: + className: VectorLimitOperator + native: true + Statistics: Num rows: 10 Data size: 1140 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 10 Data size: 1140 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select sum(l1[0]), l1[1] from parquet_complex_types where l1[0] > 1000 group by l1[1] order by l1[1] desc limit 10 +PREHOOK: type: QUERY +PREHOOK: Input: default@parquet_complex_types +#### A masked pattern was here #### +POSTHOOK: query: select sum(l1[0]), l1[1] from parquet_complex_types where l1[0] > 1000 group by l1[1] order by l1[1] desc limit 10 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@parquet_complex_types +#### A masked pattern was here #### +2144 2145 +2142 2143 +2140 2141 +2138 2139 +2136 2137 +2134 2135 +2132 2133 +2130 2131 +2128 2129 +2126 2127 +PREHOOK: query: INSERT OVERWRITE TABLE parquet_complex_types +SELECT id, m1, l1, st1, listIndex FROM parquet_complex_types_staging where id < 1025 +PREHOOK: type: QUERY +PREHOOK: Input: default@parquet_complex_types_staging +PREHOOK: Output: default@parquet_complex_types +POSTHOOK: query: INSERT OVERWRITE TABLE parquet_complex_types +SELECT id, m1, l1, st1, listIndex FROM parquet_complex_types_staging where id < 1025 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@parquet_complex_types_staging +POSTHOOK: Output: default@parquet_complex_types +POSTHOOK: Lineage: parquet_complex_types.id SIMPLE [(parquet_complex_types_staging)parquet_complex_types_staging.FieldSchema(name:id, type:int, comment:null), ] +POSTHOOK: Lineage: parquet_complex_types.l1 SIMPLE [(parquet_complex_types_staging)parquet_complex_types_staging.FieldSchema(name:l1, type:array, comment:null), ] +POSTHOOK: Lineage: parquet_complex_types.listindex SIMPLE [(parquet_complex_types_staging)parquet_complex_types_staging.FieldSchema(name:listindex, type:int, comment:null), ] +POSTHOOK: Lineage: parquet_complex_types.m1 SIMPLE [(parquet_complex_types_staging)parquet_complex_types_staging.FieldSchema(name:m1, type:map, comment:null), ] +POSTHOOK: Lineage: parquet_complex_types.st1 SIMPLE [(parquet_complex_types_staging)parquet_complex_types_staging.FieldSchema(name:st1, type:struct, comment:null), ] +PREHOOK: query: select 
count(*) from parquet_complex_types +PREHOOK: type: QUERY +PREHOOK: Input: default@parquet_complex_types +#### A masked pattern was here #### +POSTHOOK: query: select count(*) from parquet_complex_types +POSTHOOK: type: QUERY +POSTHOOK: Input: default@parquet_complex_types +#### A masked pattern was here #### +1024 +PREHOOK: query: explain vectorization expression select l1, l1[0], l1[1], l1[listIndex], listIndex from parquet_complex_types limit 10 +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization expression select l1, l1[0], l1[1], l1[listIndex], listIndex from parquet_complex_types limit 10 +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: parquet_complex_types + Statistics: Num rows: 1024 Data size: 120776 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Select Operator + expressions: l1 (type: array), l1[0] (type: int), l1[1] (type: int), l1[listindex] (type: int), listindex (type: int) + outputColumnNames: _col0, _col1, _col2, _col3, _col4 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [2, 6, 7, 8, 4] + selectExpressions: ListIndexColScalar(col 2:array, col 0:int) -> 6:int, ListIndexColScalar(col 2:array, col 1:int) -> 7:int, ListIndexColColumn(col 2:array, col 4:int) -> 8:int + Statistics: Num rows: 1024 Data size: 120776 Basic stats: COMPLETE Column stats: NONE + Limit + Number of rows: 10 + Limit Vectorization: + className: VectorLimitOperator + native: true + Statistics: Num rows: 10 Data size: 1170 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 10 Data size: 1170 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized, llap + LLAP IO: all inputs (cache only) + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [] + featureSupportInUse: [] + inputFileFormats: org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + + Stage: Stage-0 + Fetch Operator + limit: 10 + Processor Tree: + ListSink + +PREHOOK: query: select l1, l1[0], l1[1], l1[listIndex], listIndex from parquet_complex_types limit 10 +PREHOOK: type: QUERY +PREHOOK: Input: default@parquet_complex_types +#### A masked pattern was here #### +POSTHOOK: query: select l1, l1[0], l1[1], l1[listIndex], listIndex from parquet_complex_types limit 10 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@parquet_complex_types +#### A masked pattern was here #### +[100,101] 100 101 100 0 +[102,103] 102 103 103 1 +[104,105] 104 105 104 0 +[106,107] 106 107 107 1 +[108,109] 108 109 108 0 +[110,111] 110 111 111 1 +[112,113] 112 113 112 0 +[114,115] 114 115 115 1 +[116,117] 116 117 116 0 +[118,119] 118 119 119 1 +PREHOOK: query: explain vectorization expression select sum(l1[0]), l1[1] from parquet_complex_types where l1[0] > 1000 
group by l1[1] order by l1[1] limit 10 +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization expression select sum(l1[0]), l1[1] from parquet_complex_types where l1[0] > 1000 group by l1[1] order by l1[1] limit 10 +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) + Reducer 3 <- Reducer 2 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: parquet_complex_types + Statistics: Num rows: 1024 Data size: 116880 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Filter Operator + Filter Vectorization: + className: VectorFilterOperator + native: true + predicateExpression: FilterLongColGreaterLongScalar(col 6:int, val 1000)(children: ListIndexColScalar(col 2:array, col 0:int) -> 6:int) + predicate: (l1[0] > 1000) (type: boolean) + Statistics: Num rows: 341 Data size: 38921 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: l1[1] (type: int), l1[0] (type: int) + outputColumnNames: _col0, _col1 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [6, 7] + selectExpressions: ListIndexColScalar(col 2:array, col 1:int) -> 6:int, ListIndexColScalar(col 2:array, col 0:int) -> 7:int + Statistics: Num rows: 341 Data size: 38921 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: sum(_col1) + Group By Vectorization: + aggregators: VectorUDAFSumLong(col 7:int) -> bigint + className: VectorGroupByOperator + groupByMode: HASH + keyExpressions: col 6:int + native: false + vectorProcessingMode: HASH + projectedOutputColumnNums: [0] + keys: _col0 (type: int) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 341 Data size: 38921 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 341 Data size: 38921 Basic stats: COMPLETE Column stats: NONE + TopN Hash Memory Usage: 0.1 + value expressions: _col1 (type: bigint) + Execution mode: vectorized, llap + LLAP IO: all inputs (cache only) + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [] + featureSupportInUse: [] + inputFileFormats: org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reducer 2 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Operator Tree: + Group By Operator + aggregations: sum(VALUE._col0) + Group By Vectorization: + aggregators: VectorUDAFSumLong(col 1:bigint) -> bigint + className: 
VectorGroupByOperator + groupByMode: MERGEPARTIAL + keyExpressions: col 0:int + native: false + vectorProcessingMode: MERGE_PARTIAL + projectedOutputColumnNums: [0] + keys: KEY._col0 (type: int) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 170 Data size: 19403 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: _col1 (type: bigint), _col0 (type: int) + outputColumnNames: _col0, _col2 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [1, 0] + Statistics: Num rows: 170 Data size: 19403 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col2 (type: int) + sort order: + + Reduce Sink Vectorization: + className: VectorReduceSinkObjectHashOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 170 Data size: 19403 Basic stats: COMPLETE Column stats: NONE + TopN Hash Memory Usage: 0.1 + value expressions: _col0 (type: bigint) + Reducer 3 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Operator Tree: + Select Operator + expressions: VALUE._col0 (type: bigint), KEY.reducesinkkey0 (type: int) + outputColumnNames: _col0, _col1 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [1, 0] + Statistics: Num rows: 170 Data size: 19403 Basic stats: COMPLETE Column stats: NONE + Limit + Number of rows: 10 + Limit Vectorization: + className: VectorLimitOperator + native: true + Statistics: Num rows: 10 Data size: 1140 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 10 Data size: 1140 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select sum(l1[0]), l1[1] from parquet_complex_types where l1[0] > 1000 group by l1[1] order by l1[1] desc limit 10 +PREHOOK: type: QUERY +PREHOOK: Input: default@parquet_complex_types +#### A masked pattern was here #### +POSTHOOK: query: select sum(l1[0]), l1[1] from parquet_complex_types where l1[0] > 1000 group by l1[1] order by l1[1] desc limit 10 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@parquet_complex_types +#### A masked pattern was here #### +2146 2147 +2144 2145 +2142 2143 +2140 2141 +2138 2139 +2136 2137 +2134 2135 +2132 2133 +2130 2131 +2128 2129 +PREHOOK: query: INSERT OVERWRITE TABLE parquet_complex_types +SELECT id, m1, l1, st1, listIndex FROM parquet_complex_types_staging +PREHOOK: type: QUERY +PREHOOK: Input: default@parquet_complex_types_staging +PREHOOK: Output: default@parquet_complex_types +POSTHOOK: query: INSERT OVERWRITE TABLE parquet_complex_types +SELECT id, m1, l1, st1, listIndex FROM parquet_complex_types_staging +POSTHOOK: type: QUERY +POSTHOOK: Input: 
default@parquet_complex_types_staging +POSTHOOK: Output: default@parquet_complex_types +POSTHOOK: Lineage: parquet_complex_types.id SIMPLE [(parquet_complex_types_staging)parquet_complex_types_staging.FieldSchema(name:id, type:int, comment:null), ] +POSTHOOK: Lineage: parquet_complex_types.l1 SIMPLE [(parquet_complex_types_staging)parquet_complex_types_staging.FieldSchema(name:l1, type:array, comment:null), ] +POSTHOOK: Lineage: parquet_complex_types.listindex SIMPLE [(parquet_complex_types_staging)parquet_complex_types_staging.FieldSchema(name:listindex, type:int, comment:null), ] +POSTHOOK: Lineage: parquet_complex_types.m1 SIMPLE [(parquet_complex_types_staging)parquet_complex_types_staging.FieldSchema(name:m1, type:map, comment:null), ] +POSTHOOK: Lineage: parquet_complex_types.st1 SIMPLE [(parquet_complex_types_staging)parquet_complex_types_staging.FieldSchema(name:st1, type:struct, comment:null), ] +PREHOOK: query: select count(*) from parquet_complex_types +PREHOOK: type: QUERY +PREHOOK: Input: default@parquet_complex_types +#### A masked pattern was here #### +POSTHOOK: query: select count(*) from parquet_complex_types +POSTHOOK: type: QUERY +POSTHOOK: Input: default@parquet_complex_types +#### A masked pattern was here #### +1025 +PREHOOK: query: explain vectorization expression select l1, l1[0], l1[1], l1[listIndex], listIndex from parquet_complex_types limit 10 +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization expression select l1, l1[0], l1[1], l1[listIndex], listIndex from parquet_complex_types limit 10 +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: parquet_complex_types + Statistics: Num rows: 1025 Data size: 120900 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Select Operator + expressions: l1 (type: array), l1[0] (type: int), l1[1] (type: int), l1[listindex] (type: int), listindex (type: int) + outputColumnNames: _col0, _col1, _col2, _col3, _col4 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [2, 6, 7, 8, 4] + selectExpressions: ListIndexColScalar(col 2:array, col 0:int) -> 6:int, ListIndexColScalar(col 2:array, col 1:int) -> 7:int, ListIndexColColumn(col 2:array, col 4:int) -> 8:int + Statistics: Num rows: 1025 Data size: 120900 Basic stats: COMPLETE Column stats: NONE + Limit + Number of rows: 10 + Limit Vectorization: + className: VectorLimitOperator + native: true + Statistics: Num rows: 10 Data size: 1170 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 10 Data size: 1170 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized, llap + LLAP IO: all inputs (cache only) + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [] + featureSupportInUse: [] + inputFileFormats: 
org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + + Stage: Stage-0 + Fetch Operator + limit: 10 + Processor Tree: + ListSink + +PREHOOK: query: select l1, l1[0], l1[1], l1[listIndex], listIndex from parquet_complex_types limit 10 +PREHOOK: type: QUERY +PREHOOK: Input: default@parquet_complex_types +#### A masked pattern was here #### +POSTHOOK: query: select l1, l1[0], l1[1], l1[listIndex], listIndex from parquet_complex_types limit 10 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@parquet_complex_types +#### A masked pattern was here #### +[100,101] 100 101 100 0 +[102,103] 102 103 103 1 +[104,105] 104 105 104 0 +[106,107] 106 107 107 1 +[108,109] 108 109 108 0 +[110,111] 110 111 111 1 +[112,113] 112 113 112 0 +[114,115] 114 115 115 1 +[116,117] 116 117 116 0 +[118,119] 118 119 119 1 +PREHOOK: query: explain vectorization expression select sum(l1[0]), l1[1] from parquet_complex_types where l1[0] > 1000 group by l1[1] order by l1[1] limit 10 +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization expression select sum(l1[0]), l1[1] from parquet_complex_types where l1[0] > 1000 group by l1[1] order by l1[1] limit 10 +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) + Reducer 3 <- Reducer 2 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: parquet_complex_types + Statistics: Num rows: 1025 Data size: 117000 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Filter Operator + Filter Vectorization: + className: VectorFilterOperator + native: true + predicateExpression: FilterLongColGreaterLongScalar(col 6:int, val 1000)(children: ListIndexColScalar(col 2:array, col 0:int) -> 6:int) + predicate: (l1[0] > 1000) (type: boolean) + Statistics: Num rows: 341 Data size: 38923 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: l1[1] (type: int), l1[0] (type: int) + outputColumnNames: _col0, _col1 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [6, 7] + selectExpressions: ListIndexColScalar(col 2:array, col 1:int) -> 6:int, ListIndexColScalar(col 2:array, col 0:int) -> 7:int + Statistics: Num rows: 341 Data size: 38923 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: sum(_col1) + Group By Vectorization: + aggregators: VectorUDAFSumLong(col 7:int) -> bigint + className: VectorGroupByOperator + groupByMode: HASH + keyExpressions: col 6:int + native: false + vectorProcessingMode: HASH + projectedOutputColumnNums: [0] + keys: _col0 (type: int) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 341 Data size: 38923 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + 
Statistics: Num rows: 341 Data size: 38923 Basic stats: COMPLETE Column stats: NONE + TopN Hash Memory Usage: 0.1 + value expressions: _col1 (type: bigint) + Execution mode: vectorized, llap + LLAP IO: all inputs (cache only) + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [] + featureSupportInUse: [] + inputFileFormats: org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reducer 2 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Operator Tree: + Group By Operator + aggregations: sum(VALUE._col0) + Group By Vectorization: + aggregators: VectorUDAFSumLong(col 1:bigint) -> bigint + className: VectorGroupByOperator + groupByMode: MERGEPARTIAL + keyExpressions: col 0:int + native: false + vectorProcessingMode: MERGE_PARTIAL + projectedOutputColumnNums: [0] + keys: KEY._col0 (type: int) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 170 Data size: 19404 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: _col1 (type: bigint), _col0 (type: int) + outputColumnNames: _col0, _col2 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [1, 0] + Statistics: Num rows: 170 Data size: 19404 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col2 (type: int) + sort order: + + Reduce Sink Vectorization: + className: VectorReduceSinkObjectHashOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 170 Data size: 19404 Basic stats: COMPLETE Column stats: NONE + TopN Hash Memory Usage: 0.1 + value expressions: _col0 (type: bigint) + Reducer 3 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Operator Tree: + Select Operator + expressions: VALUE._col0 (type: bigint), KEY.reducesinkkey0 (type: int) + outputColumnNames: _col0, _col1 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [1, 0] + Statistics: Num rows: 170 Data size: 19404 Basic stats: COMPLETE Column stats: NONE + Limit + Number of rows: 10 + Limit Vectorization: + className: VectorLimitOperator + native: true + Statistics: Num rows: 10 Data size: 1140 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 10 Data size: 1140 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select 
sum(l1[0]), l1[1] from parquet_complex_types where l1[0] > 1000 group by l1[1] order by l1[1] desc limit 10 +PREHOOK: type: QUERY +PREHOOK: Input: default@parquet_complex_types +#### A masked pattern was here #### +POSTHOOK: query: select sum(l1[0]), l1[1] from parquet_complex_types where l1[0] > 1000 group by l1[1] order by l1[1] desc limit 10 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@parquet_complex_types +#### A masked pattern was here #### +2148 2149 +2146 2147 +2144 2145 +2142 2143 +2140 2141 +2138 2139 +2136 2137 +2134 2135 +2132 2133 +2130 2131 diff --git a/ql/src/test/results/clientpositive/llap/vector_complex_join.q.out b/ql/src/test/results/clientpositive/llap/vector_complex_join.q.out index ee9e40ab5e..37827b2a2b 100644 --- a/ql/src/test/results/clientpositive/llap/vector_complex_join.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_complex_join.q.out @@ -159,24 +159,25 @@ POSTHOOK: Input: default@test #### A masked pattern was here #### alltypesorc.ctinyint alltypesorc.csmallint alltypesorc.cint alltypesorc.cbigint alltypesorc.cfloat alltypesorc.cdouble alltypesorc.cstring1 alltypesorc.cstring2 alltypesorc.ctimestamp1 alltypesorc.ctimestamp2 alltypesorc.cboolean1 alltypesorc.cboolean2 test.a test.b -51 NULL 199408978 -1800989684 -51.0 NULL 34N4EY63M1GFWuW0boW P4PL5h1eXR4mMLr2 1969-12-31 16:00:08.451 NULL false true 199408978 {1:"val_1",2:"val_2"} -PREHOOK: query: CREATE TABLE test2a (a ARRAY) STORED AS ORC +PREHOOK: query: CREATE TABLE test2a (a ARRAY, index INT) STORED AS ORC PREHOOK: type: CREATETABLE PREHOOK: Output: database:default PREHOOK: Output: default@test2a -POSTHOOK: query: CREATE TABLE test2a (a ARRAY) STORED AS ORC +POSTHOOK: query: CREATE TABLE test2a (a ARRAY, index INT) STORED AS ORC POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default POSTHOOK: Output: default@test2a -PREHOOK: query: INSERT OVERWRITE TABLE test2a SELECT ARRAY(1, 2) FROM src LIMIT 1 +PREHOOK: query: INSERT OVERWRITE TABLE test2a SELECT ARRAY(1, 2), 1 FROM src LIMIT 1 PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@test2a -POSTHOOK: query: INSERT OVERWRITE TABLE test2a SELECT ARRAY(1, 2) FROM src LIMIT 1 +POSTHOOK: query: INSERT OVERWRITE TABLE test2a SELECT ARRAY(1, 2), 1 FROM src LIMIT 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@test2a POSTHOOK: Lineage: test2a.a EXPRESSION [] -_c0 +POSTHOOK: Lineage: test2a.index SIMPLE [] +_c0 _c1 PREHOOK: query: CREATE TABLE test2b (a INT) STORED AS ORC PREHOOK: type: CREATETABLE PREHOOK: Output: database:default @@ -240,17 +241,17 @@ STAGE PLANS: className: VectorMapJoinInnerLongOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true - outputColumnNames: _col0, _col4 + outputColumnNames: _col0, _col4, _col5 input vertices: 1 Map 2 Statistics: Num rows: 3 Data size: 13 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: _col0 (type: int), _col4 (type: array) - outputColumnNames: _col0, _col1 + expressions: _col0 (type: int), _col4 (type: array), _col5 (type: int) + outputColumnNames: _col0, _col1, _col2 Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0, 2] + projectedOutputColumnNums: [0, 2, 3] Statistics: Num rows: 3 Data size: 13 Basic 
stats: COMPLETE Column stats: NONE File Output Operator compressed: false @@ -277,24 +278,38 @@ STAGE PLANS: Map Operator Tree: TableScan alias: test2a - Statistics: Num rows: 1 Data size: 120 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 124 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true Filter Operator + Filter Vectorization: + className: VectorFilterOperator + native: true + predicateExpression: SelectColumnIsNotNull(col 3:int)(children: ListIndexColScalar(col 0:array, col 1:int) -> 3:int) predicate: a[1] is not null (type: boolean) - Statistics: Num rows: 1 Data size: 120 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 124 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: a[1] (type: int) sort order: + Map-reduce partition columns: a[1] (type: int) - Statistics: Num rows: 1 Data size: 120 Basic stats: COMPLETE Column stats: NONE - value expressions: a (type: array) - Execution mode: llap + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + keyExpressions: ListIndexColScalar(col 0:array, col 1:int) -> 3:int + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 1 Data size: 124 Basic stats: COMPLETE Column stats: NONE + value expressions: a (type: array), index (type: int) + Execution mode: vectorized, llap LLAP IO: all inputs Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [] + featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - notVectorizedReason: FILTER operator: Unexpected hive type name array - vectorized: false + allNative: true + usesVectorUDFAdaptor: false + vectorized: true Stage: Stage-0 Fetch Operator @@ -312,5 +327,140 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@test2a POSTHOOK: Input: default@test2b #### A masked pattern was here #### -test2b.a test2a.a -2 [1,2] +test2b.a test2a.a test2a.index +2 [1,2] 1 +PREHOOK: query: explain vectorization expression +select * from test2b join test2a on test2b.a = test2a.a[test2a.index] +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization expression +select * from test2b join test2a on test2b.a = test2a.a[test2a.index] +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Map 1 <- Map 2 (BROADCAST_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: test2b + Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Filter Operator + Filter Vectorization: + className: VectorFilterOperator + native: true + predicateExpression: SelectColumnIsNotNull(col 0:int) + predicate: a is not null (type: boolean) + Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE + Map Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 a (type: int) + 1 a[index] (type: int) + Map Join Vectorization: + className: 
VectorMapJoinInnerLongOperator + native: true + nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true + outputColumnNames: _col0, _col4, _col5 + input vertices: + 1 Map 2 + Statistics: Num rows: 3 Data size: 13 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: _col0 (type: int), _col4 (type: array), _col5 (type: int) + outputColumnNames: _col0, _col1, _col2 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 2, 3] + Statistics: Num rows: 3 Data size: 13 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 3 Data size: 13 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [] + featureSupportInUse: [] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Map 2 + Map Operator Tree: + TableScan + alias: test2a + Statistics: Num rows: 1 Data size: 124 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Filter Operator + Filter Vectorization: + className: VectorFilterOperator + native: true + predicateExpression: SelectColumnIsNotNull(col 3:int)(children: ListIndexColColumn(col 0:array, col 1:int) -> 3:int) + predicate: a[index] is not null (type: boolean) + Statistics: Num rows: 1 Data size: 124 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: a[index] (type: int) + sort order: + + Map-reduce partition columns: a[index] (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + keyExpressions: ListIndexColColumn(col 0:array, col 1:int) -> 3:int + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 1 Data size: 124 Basic stats: COMPLETE Column stats: NONE + value expressions: a (type: array), index (type: int) + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [] + featureSupportInUse: [] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select * from test2b join test2a on test2b.a = test2a.a[test2a.index] +PREHOOK: type: QUERY +PREHOOK: Input: default@test2a +PREHOOK: Input: default@test2b +#### A masked pattern was here #### +POSTHOOK: query: select * from test2b join test2a on test2b.a = test2a.a[test2a.index] 
+POSTHOOK: type: QUERY +POSTHOOK: Input: default@test2a +POSTHOOK: Input: default@test2b +#### A masked pattern was here #### +test2b.a test2a.a test2a.index +2 [1,2] 1 diff --git a/ql/src/test/results/clientpositive/parquet_complex_types_vectorization.q.out b/ql/src/test/results/clientpositive/parquet_complex_types_vectorization.q.out new file mode 100644 index 0000000000..03488a1cba --- /dev/null +++ b/ql/src/test/results/clientpositive/parquet_complex_types_vectorization.q.out @@ -0,0 +1,878 @@ +PREHOOK: query: DROP TABLE parquet_complex_types_staging +PREHOOK: type: DROPTABLE +POSTHOOK: query: DROP TABLE parquet_complex_types_staging +POSTHOOK: type: DROPTABLE +PREHOOK: query: DROP TABLE parquet_complex_types +PREHOOK: type: DROPTABLE +POSTHOOK: query: DROP TABLE parquet_complex_types +POSTHOOK: type: DROPTABLE +PREHOOK: query: CREATE TABLE parquet_complex_types_staging ( +id int, +m1 map, +l1 array, +st1 struct, +listIndex int +) ROW FORMAT DELIMITED +FIELDS TERMINATED BY '|' +COLLECTION ITEMS TERMINATED BY ',' +MAP KEYS TERMINATED BY ':' +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@parquet_complex_types_staging +POSTHOOK: query: CREATE TABLE parquet_complex_types_staging ( +id int, +m1 map, +l1 array, +st1 struct, +listIndex int +) ROW FORMAT DELIMITED +FIELDS TERMINATED BY '|' +COLLECTION ITEMS TERMINATED BY ',' +MAP KEYS TERMINATED BY ':' +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@parquet_complex_types_staging +PREHOOK: query: CREATE TABLE parquet_complex_types ( +id int, +m1 map, +l1 array, +st1 struct, +listIndex int +) STORED AS PARQUET +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@parquet_complex_types +POSTHOOK: query: CREATE TABLE parquet_complex_types ( +id int, +m1 map, +l1 array, +st1 struct, +listIndex int +) STORED AS PARQUET +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@parquet_complex_types +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/parquet_complex_types.txt' OVERWRITE INTO TABLE parquet_complex_types_staging +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@parquet_complex_types_staging +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/parquet_complex_types.txt' OVERWRITE INTO TABLE parquet_complex_types_staging +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@parquet_complex_types_staging +PREHOOK: query: INSERT OVERWRITE TABLE parquet_complex_types +SELECT id, m1, l1, st1, listIndex FROM parquet_complex_types_staging where id < 1024 +PREHOOK: type: QUERY +PREHOOK: Input: default@parquet_complex_types_staging +PREHOOK: Output: default@parquet_complex_types +POSTHOOK: query: INSERT OVERWRITE TABLE parquet_complex_types +SELECT id, m1, l1, st1, listIndex FROM parquet_complex_types_staging where id < 1024 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@parquet_complex_types_staging +POSTHOOK: Output: default@parquet_complex_types +POSTHOOK: Lineage: parquet_complex_types.id SIMPLE [(parquet_complex_types_staging)parquet_complex_types_staging.FieldSchema(name:id, type:int, comment:null), ] +POSTHOOK: Lineage: parquet_complex_types.l1 SIMPLE [(parquet_complex_types_staging)parquet_complex_types_staging.FieldSchema(name:l1, type:array, comment:null), ] +POSTHOOK: Lineage: parquet_complex_types.listindex SIMPLE [(parquet_complex_types_staging)parquet_complex_types_staging.FieldSchema(name:listindex, 
type:int, comment:null), ] +POSTHOOK: Lineage: parquet_complex_types.m1 SIMPLE [(parquet_complex_types_staging)parquet_complex_types_staging.FieldSchema(name:m1, type:map, comment:null), ] +POSTHOOK: Lineage: parquet_complex_types.st1 SIMPLE [(parquet_complex_types_staging)parquet_complex_types_staging.FieldSchema(name:st1, type:struct, comment:null), ] +PREHOOK: query: select count(*) from parquet_complex_types +PREHOOK: type: QUERY +PREHOOK: Input: default@parquet_complex_types +#### A masked pattern was here #### +POSTHOOK: query: select count(*) from parquet_complex_types +POSTHOOK: type: QUERY +POSTHOOK: Input: default@parquet_complex_types +#### A masked pattern was here #### +1023 +PREHOOK: query: explain vectorization expression select l1, l1[0], l1[1], l1[listIndex], listIndex from parquet_complex_types limit 10 +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization expression select l1, l1[0], l1[1], l1[listIndex], listIndex from parquet_complex_types limit 10 +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: parquet_complex_types + Statistics: Num rows: 1023 Data size: 5115 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Select Operator + expressions: l1 (type: array), l1[0] (type: int), l1[1] (type: int), l1[listindex] (type: int), listindex (type: int) + outputColumnNames: _col0, _col1, _col2, _col3, _col4 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [2, 6, 7, 8, 4] + selectExpressions: ListIndexColScalar(col 2:array, col 0:int) -> 6:int, ListIndexColScalar(col 2:array, col 1:int) -> 7:int, ListIndexColColumn(col 2:array, col 4:int) -> 8:int + Statistics: Num rows: 1023 Data size: 5115 Basic stats: COMPLETE Column stats: NONE + Limit + Number of rows: 10 + Limit Vectorization: + className: VectorLimitOperator + native: true + Statistics: Num rows: 10 Data size: 50 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 10 Data size: 50 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [] + featureSupportInUse: [] + inputFileFormats: org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + + Stage: Stage-0 + Fetch Operator + limit: 10 + Processor Tree: + ListSink + +PREHOOK: query: select l1, l1[0], l1[1], l1[listIndex], listIndex from parquet_complex_types limit 10 +PREHOOK: type: QUERY +PREHOOK: Input: default@parquet_complex_types +#### A masked pattern was here #### +POSTHOOK: query: select l1, l1[0], l1[1], l1[listIndex], listIndex from parquet_complex_types limit 10 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@parquet_complex_types +#### A masked pattern was here #### +[100,101] 100 101 100 0 +[102,103] 102 103 103 1 +[104,105] 104 105 104 0 +[106,107] 106 
107 107 1 +[108,109] 108 109 108 0 +[110,111] 110 111 111 1 +[112,113] 112 113 112 0 +[114,115] 114 115 115 1 +[116,117] 116 117 116 0 +[118,119] 118 119 119 1 +PREHOOK: query: explain vectorization expression select sum(l1[0]), l1[1] from parquet_complex_types where l1[0] > 1000 group by l1[1] order by l1[1] limit 10 +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization expression select sum(l1[0]), l1[1] from parquet_complex_types where l1[0] > 1000 group by l1[1] order by l1[1] limit 10 +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-2 depends on stages: Stage-1 + Stage-0 depends on stages: Stage-2 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: parquet_complex_types + Statistics: Num rows: 1023 Data size: 5115 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Filter Operator + Filter Vectorization: + className: VectorFilterOperator + native: true + predicateExpression: FilterLongColGreaterLongScalar(col 6:int, val 1000)(children: ListIndexColScalar(col 2:array, col 0:int) -> 6:int) + predicate: (l1[0] > 1000) (type: boolean) + Statistics: Num rows: 341 Data size: 1705 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: l1[1] (type: int), l1[0] (type: int) + outputColumnNames: _col0, _col1 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [6, 7] + selectExpressions: ListIndexColScalar(col 2:array, col 1:int) -> 6:int, ListIndexColScalar(col 2:array, col 0:int) -> 7:int + Statistics: Num rows: 341 Data size: 1705 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: sum(_col1) + Group By Vectorization: + aggregators: VectorUDAFSumLong(col 7:int) -> bigint + className: VectorGroupByOperator + groupByMode: HASH + keyExpressions: col 6:int + native: false + vectorProcessingMode: HASH + projectedOutputColumnNums: [0] + keys: _col0 (type: int) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 341 Data size: 1705 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkOperator + native: false + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false + Statistics: Num rows: 341 Data size: 1705 Basic stats: COMPLETE Column stats: NONE + TopN Hash Memory Usage: 0.1 + value expressions: _col1 (type: bigint) + Execution mode: vectorized + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [] + featureSupportInUse: [] + inputFileFormats: org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Vectorization: + enabled: false + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true + enableConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false + Reduce Operator Tree: + Group By Operator + aggregations: sum(VALUE._col0) + keys: KEY._col0 (type: int) + mode: mergepartial + 
outputColumnNames: _col0, _col1 + Statistics: Num rows: 170 Data size: 850 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: _col1 (type: bigint), _col0 (type: int) + outputColumnNames: _col0, _col2 + Statistics: Num rows: 170 Data size: 850 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + + Stage: Stage-2 + Map Reduce + Map Operator Tree: + TableScan + TableScan Vectorization: + native: true + Reduce Output Operator + key expressions: _col2 (type: int) + sort order: + + Reduce Sink Vectorization: + className: VectorReduceSinkOperator + native: false + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false + Statistics: Num rows: 170 Data size: 850 Basic stats: COMPLETE Column stats: NONE + TopN Hash Memory Usage: 0.1 + value expressions: _col0 (type: bigint) + Execution mode: vectorized + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [] + featureSupportInUse: [] + inputFileFormats: org.apache.hadoop.mapred.SequenceFileInputFormat + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Vectorization: + enabled: false + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true + enableConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false + Reduce Operator Tree: + Select Operator + expressions: VALUE._col0 (type: bigint), KEY.reducesinkkey0 (type: int) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 170 Data size: 850 Basic stats: COMPLETE Column stats: NONE + Limit + Number of rows: 10 + Statistics: Num rows: 10 Data size: 50 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 10 Data size: 50 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select sum(l1[0]), l1[1] from parquet_complex_types where l1[0] > 1000 group by l1[1] order by l1[1] desc limit 10 +PREHOOK: type: QUERY +PREHOOK: Input: default@parquet_complex_types +#### A masked pattern was here #### +POSTHOOK: query: select sum(l1[0]), l1[1] from parquet_complex_types where l1[0] > 1000 group by l1[1] order by l1[1] desc limit 10 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@parquet_complex_types +#### A masked pattern was here #### +2144 2145 +2142 2143 +2140 2141 +2138 2139 +2136 2137 +2134 2135 +2132 2133 +2130 2131 +2128 2129 +2126 2127 +PREHOOK: query: INSERT OVERWRITE TABLE parquet_complex_types +SELECT id, m1, l1, st1, listIndex FROM parquet_complex_types_staging where id < 1025 +PREHOOK: type: QUERY +PREHOOK: Input: default@parquet_complex_types_staging +PREHOOK: Output: default@parquet_complex_types +POSTHOOK: query: INSERT OVERWRITE TABLE parquet_complex_types +SELECT id, m1, l1, st1, listIndex FROM parquet_complex_types_staging where 
id < 1025 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@parquet_complex_types_staging +POSTHOOK: Output: default@parquet_complex_types +POSTHOOK: Lineage: parquet_complex_types.id SIMPLE [(parquet_complex_types_staging)parquet_complex_types_staging.FieldSchema(name:id, type:int, comment:null), ] +POSTHOOK: Lineage: parquet_complex_types.l1 SIMPLE [(parquet_complex_types_staging)parquet_complex_types_staging.FieldSchema(name:l1, type:array, comment:null), ] +POSTHOOK: Lineage: parquet_complex_types.listindex SIMPLE [(parquet_complex_types_staging)parquet_complex_types_staging.FieldSchema(name:listindex, type:int, comment:null), ] +POSTHOOK: Lineage: parquet_complex_types.m1 SIMPLE [(parquet_complex_types_staging)parquet_complex_types_staging.FieldSchema(name:m1, type:map, comment:null), ] +POSTHOOK: Lineage: parquet_complex_types.st1 SIMPLE [(parquet_complex_types_staging)parquet_complex_types_staging.FieldSchema(name:st1, type:struct, comment:null), ] +PREHOOK: query: select count(*) from parquet_complex_types +PREHOOK: type: QUERY +PREHOOK: Input: default@parquet_complex_types +#### A masked pattern was here #### +POSTHOOK: query: select count(*) from parquet_complex_types +POSTHOOK: type: QUERY +POSTHOOK: Input: default@parquet_complex_types +#### A masked pattern was here #### +1024 +PREHOOK: query: explain vectorization expression select l1, l1[0], l1[1], l1[listIndex], listIndex from parquet_complex_types limit 10 +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization expression select l1, l1[0], l1[1], l1[listIndex], listIndex from parquet_complex_types limit 10 +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: parquet_complex_types + Statistics: Num rows: 1024 Data size: 5120 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Select Operator + expressions: l1 (type: array), l1[0] (type: int), l1[1] (type: int), l1[listindex] (type: int), listindex (type: int) + outputColumnNames: _col0, _col1, _col2, _col3, _col4 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [2, 6, 7, 8, 4] + selectExpressions: ListIndexColScalar(col 2:array, col 0:int) -> 6:int, ListIndexColScalar(col 2:array, col 1:int) -> 7:int, ListIndexColColumn(col 2:array, col 4:int) -> 8:int + Statistics: Num rows: 1024 Data size: 5120 Basic stats: COMPLETE Column stats: NONE + Limit + Number of rows: 10 + Limit Vectorization: + className: VectorLimitOperator + native: true + Statistics: Num rows: 10 Data size: 50 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 10 Data size: 50 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [] + featureSupportInUse: [] + inputFileFormats: org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat + allNative: false + usesVectorUDFAdaptor: 
false + vectorized: true + + Stage: Stage-0 + Fetch Operator + limit: 10 + Processor Tree: + ListSink + +PREHOOK: query: select l1, l1[0], l1[1], l1[listIndex], listIndex from parquet_complex_types limit 10 +PREHOOK: type: QUERY +PREHOOK: Input: default@parquet_complex_types +#### A masked pattern was here #### +POSTHOOK: query: select l1, l1[0], l1[1], l1[listIndex], listIndex from parquet_complex_types limit 10 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@parquet_complex_types +#### A masked pattern was here #### +[100,101] 100 101 100 0 +[102,103] 102 103 103 1 +[104,105] 104 105 104 0 +[106,107] 106 107 107 1 +[108,109] 108 109 108 0 +[110,111] 110 111 111 1 +[112,113] 112 113 112 0 +[114,115] 114 115 115 1 +[116,117] 116 117 116 0 +[118,119] 118 119 119 1 +PREHOOK: query: explain vectorization expression select sum(l1[0]), l1[1] from parquet_complex_types where l1[0] > 1000 group by l1[1] order by l1[1] limit 10 +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization expression select sum(l1[0]), l1[1] from parquet_complex_types where l1[0] > 1000 group by l1[1] order by l1[1] limit 10 +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-2 depends on stages: Stage-1 + Stage-0 depends on stages: Stage-2 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: parquet_complex_types + Statistics: Num rows: 1024 Data size: 5120 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Filter Operator + Filter Vectorization: + className: VectorFilterOperator + native: true + predicateExpression: FilterLongColGreaterLongScalar(col 6:int, val 1000)(children: ListIndexColScalar(col 2:array, col 0:int) -> 6:int) + predicate: (l1[0] > 1000) (type: boolean) + Statistics: Num rows: 341 Data size: 1705 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: l1[1] (type: int), l1[0] (type: int) + outputColumnNames: _col0, _col1 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [6, 7] + selectExpressions: ListIndexColScalar(col 2:array, col 1:int) -> 6:int, ListIndexColScalar(col 2:array, col 0:int) -> 7:int + Statistics: Num rows: 341 Data size: 1705 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: sum(_col1) + Group By Vectorization: + aggregators: VectorUDAFSumLong(col 7:int) -> bigint + className: VectorGroupByOperator + groupByMode: HASH + keyExpressions: col 6:int + native: false + vectorProcessingMode: HASH + projectedOutputColumnNums: [0] + keys: _col0 (type: int) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 341 Data size: 1705 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkOperator + native: false + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false + Statistics: Num rows: 341 Data size: 1705 Basic stats: COMPLETE Column stats: NONE + TopN Hash Memory Usage: 0.1 + value expressions: _col1 (type: bigint) + Execution mode: vectorized + Map Vectorization: + enabled: true + 
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [] + featureSupportInUse: [] + inputFileFormats: org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Vectorization: + enabled: false + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true + enableConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false + Reduce Operator Tree: + Group By Operator + aggregations: sum(VALUE._col0) + keys: KEY._col0 (type: int) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 170 Data size: 850 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: _col1 (type: bigint), _col0 (type: int) + outputColumnNames: _col0, _col2 + Statistics: Num rows: 170 Data size: 850 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + + Stage: Stage-2 + Map Reduce + Map Operator Tree: + TableScan + TableScan Vectorization: + native: true + Reduce Output Operator + key expressions: _col2 (type: int) + sort order: + + Reduce Sink Vectorization: + className: VectorReduceSinkOperator + native: false + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false + Statistics: Num rows: 170 Data size: 850 Basic stats: COMPLETE Column stats: NONE + TopN Hash Memory Usage: 0.1 + value expressions: _col0 (type: bigint) + Execution mode: vectorized + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [] + featureSupportInUse: [] + inputFileFormats: org.apache.hadoop.mapred.SequenceFileInputFormat + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Vectorization: + enabled: false + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true + enableConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false + Reduce Operator Tree: + Select Operator + expressions: VALUE._col0 (type: bigint), KEY.reducesinkkey0 (type: int) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 170 Data size: 850 Basic stats: COMPLETE Column stats: NONE + Limit + Number of rows: 10 + Statistics: Num rows: 10 Data size: 50 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 10 Data size: 50 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select sum(l1[0]), l1[1] from parquet_complex_types where l1[0] > 1000 group by l1[1] order by l1[1] desc limit 10 +PREHOOK: type: QUERY +PREHOOK: Input: default@parquet_complex_types +#### A masked pattern was here #### +POSTHOOK: query: select sum(l1[0]), l1[1] from parquet_complex_types where l1[0] > 1000 group by l1[1] order by l1[1] desc limit 10 +POSTHOOK: type: QUERY 
+POSTHOOK: Input: default@parquet_complex_types +#### A masked pattern was here #### +2146 2147 +2144 2145 +2142 2143 +2140 2141 +2138 2139 +2136 2137 +2134 2135 +2132 2133 +2130 2131 +2128 2129 +PREHOOK: query: INSERT OVERWRITE TABLE parquet_complex_types +SELECT id, m1, l1, st1, listIndex FROM parquet_complex_types_staging +PREHOOK: type: QUERY +PREHOOK: Input: default@parquet_complex_types_staging +PREHOOK: Output: default@parquet_complex_types +POSTHOOK: query: INSERT OVERWRITE TABLE parquet_complex_types +SELECT id, m1, l1, st1, listIndex FROM parquet_complex_types_staging +POSTHOOK: type: QUERY +POSTHOOK: Input: default@parquet_complex_types_staging +POSTHOOK: Output: default@parquet_complex_types +POSTHOOK: Lineage: parquet_complex_types.id SIMPLE [(parquet_complex_types_staging)parquet_complex_types_staging.FieldSchema(name:id, type:int, comment:null), ] +POSTHOOK: Lineage: parquet_complex_types.l1 SIMPLE [(parquet_complex_types_staging)parquet_complex_types_staging.FieldSchema(name:l1, type:array, comment:null), ] +POSTHOOK: Lineage: parquet_complex_types.listindex SIMPLE [(parquet_complex_types_staging)parquet_complex_types_staging.FieldSchema(name:listindex, type:int, comment:null), ] +POSTHOOK: Lineage: parquet_complex_types.m1 SIMPLE [(parquet_complex_types_staging)parquet_complex_types_staging.FieldSchema(name:m1, type:map, comment:null), ] +POSTHOOK: Lineage: parquet_complex_types.st1 SIMPLE [(parquet_complex_types_staging)parquet_complex_types_staging.FieldSchema(name:st1, type:struct, comment:null), ] +PREHOOK: query: select count(*) from parquet_complex_types +PREHOOK: type: QUERY +PREHOOK: Input: default@parquet_complex_types +#### A masked pattern was here #### +POSTHOOK: query: select count(*) from parquet_complex_types +POSTHOOK: type: QUERY +POSTHOOK: Input: default@parquet_complex_types +#### A masked pattern was here #### +1025 +PREHOOK: query: explain vectorization expression select l1, l1[0], l1[1], l1[listIndex], listIndex from parquet_complex_types limit 10 +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization expression select l1, l1[0], l1[1], l1[listIndex], listIndex from parquet_complex_types limit 10 +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: parquet_complex_types + Statistics: Num rows: 1025 Data size: 5125 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Select Operator + expressions: l1 (type: array), l1[0] (type: int), l1[1] (type: int), l1[listindex] (type: int), listindex (type: int) + outputColumnNames: _col0, _col1, _col2, _col3, _col4 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [2, 6, 7, 8, 4] + selectExpressions: ListIndexColScalar(col 2:array, col 0:int) -> 6:int, ListIndexColScalar(col 2:array, col 1:int) -> 7:int, ListIndexColColumn(col 2:array, col 4:int) -> 8:int + Statistics: Num rows: 1025 Data size: 5125 Basic stats: COMPLETE Column stats: NONE + Limit + Number of rows: 10 + Limit Vectorization: + className: VectorLimitOperator + native: true + Statistics: Num rows: 10 Data size: 50 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 10 Data size: 50 
Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [] + featureSupportInUse: [] + inputFileFormats: org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + + Stage: Stage-0 + Fetch Operator + limit: 10 + Processor Tree: + ListSink + +PREHOOK: query: select l1, l1[0], l1[1], l1[listIndex], listIndex from parquet_complex_types limit 10 +PREHOOK: type: QUERY +PREHOOK: Input: default@parquet_complex_types +#### A masked pattern was here #### +POSTHOOK: query: select l1, l1[0], l1[1], l1[listIndex], listIndex from parquet_complex_types limit 10 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@parquet_complex_types +#### A masked pattern was here #### +[100,101] 100 101 100 0 +[102,103] 102 103 103 1 +[104,105] 104 105 104 0 +[106,107] 106 107 107 1 +[108,109] 108 109 108 0 +[110,111] 110 111 111 1 +[112,113] 112 113 112 0 +[114,115] 114 115 115 1 +[116,117] 116 117 116 0 +[118,119] 118 119 119 1 +PREHOOK: query: explain vectorization expression select sum(l1[0]), l1[1] from parquet_complex_types where l1[0] > 1000 group by l1[1] order by l1[1] limit 10 +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization expression select sum(l1[0]), l1[1] from parquet_complex_types where l1[0] > 1000 group by l1[1] order by l1[1] limit 10 +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-2 depends on stages: Stage-1 + Stage-0 depends on stages: Stage-2 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: parquet_complex_types + Statistics: Num rows: 1025 Data size: 5125 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Filter Operator + Filter Vectorization: + className: VectorFilterOperator + native: true + predicateExpression: FilterLongColGreaterLongScalar(col 6:int, val 1000)(children: ListIndexColScalar(col 2:array, col 0:int) -> 6:int) + predicate: (l1[0] > 1000) (type: boolean) + Statistics: Num rows: 341 Data size: 1705 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: l1[1] (type: int), l1[0] (type: int) + outputColumnNames: _col0, _col1 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [6, 7] + selectExpressions: ListIndexColScalar(col 2:array, col 1:int) -> 6:int, ListIndexColScalar(col 2:array, col 0:int) -> 7:int + Statistics: Num rows: 341 Data size: 1705 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: sum(_col1) + Group By Vectorization: + aggregators: VectorUDAFSumLong(col 7:int) -> bigint + className: VectorGroupByOperator + groupByMode: HASH + keyExpressions: col 6:int + native: false + vectorProcessingMode: HASH + projectedOutputColumnNums: [0] + keys: _col0 (type: int) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 341 Data size: 1705 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Reduce Sink 
Vectorization: + className: VectorReduceSinkOperator + native: false + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false + Statistics: Num rows: 341 Data size: 1705 Basic stats: COMPLETE Column stats: NONE + TopN Hash Memory Usage: 0.1 + value expressions: _col1 (type: bigint) + Execution mode: vectorized + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [] + featureSupportInUse: [] + inputFileFormats: org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Vectorization: + enabled: false + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true + enableConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false + Reduce Operator Tree: + Group By Operator + aggregations: sum(VALUE._col0) + keys: KEY._col0 (type: int) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 170 Data size: 850 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: _col1 (type: bigint), _col0 (type: int) + outputColumnNames: _col0, _col2 + Statistics: Num rows: 170 Data size: 850 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + + Stage: Stage-2 + Map Reduce + Map Operator Tree: + TableScan + TableScan Vectorization: + native: true + Reduce Output Operator + key expressions: _col2 (type: int) + sort order: + + Reduce Sink Vectorization: + className: VectorReduceSinkOperator + native: false + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false + Statistics: Num rows: 170 Data size: 850 Basic stats: COMPLETE Column stats: NONE + TopN Hash Memory Usage: 0.1 + value expressions: _col0 (type: bigint) + Execution mode: vectorized + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [] + featureSupportInUse: [] + inputFileFormats: org.apache.hadoop.mapred.SequenceFileInputFormat + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Vectorization: + enabled: false + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true + enableConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false + Reduce Operator Tree: + Select Operator + expressions: VALUE._col0 (type: bigint), KEY.reducesinkkey0 (type: int) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 170 Data size: 850 Basic stats: COMPLETE Column stats: NONE + Limit + Number of rows: 10 + Statistics: Num rows: 10 Data size: 50 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 10 Data size: 50 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select sum(l1[0]), l1[1] from parquet_complex_types where l1[0] > 1000 group by l1[1] order by l1[1] desc limit 10 +PREHOOK: type: QUERY +PREHOOK: Input: default@parquet_complex_types +#### A masked pattern was here #### +POSTHOOK: query: select sum(l1[0]), l1[1] from parquet_complex_types where l1[0] > 1000 group by l1[1] order by l1[1] desc limit 10 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@parquet_complex_types +#### A masked pattern was here #### +2148 2149 +2146 2147 +2144 2145 +2142 2143 +2140 2141 +2138 2139 +2136 2137 +2134 2135 +2132 2133 +2130 2131 diff --git a/ql/src/test/results/clientpositive/vector_complex_join.q.out b/ql/src/test/results/clientpositive/vector_complex_join.q.out index f8501f6408..2d45b6bdee 100644 --- a/ql/src/test/results/clientpositive/vector_complex_join.q.out +++ b/ql/src/test/results/clientpositive/vector_complex_join.q.out @@ -134,24 +134,25 @@ POSTHOOK: Input: default@test #### A masked pattern was here #### alltypesorc.ctinyint alltypesorc.csmallint alltypesorc.cint alltypesorc.cbigint alltypesorc.cfloat alltypesorc.cdouble alltypesorc.cstring1 alltypesorc.cstring2 alltypesorc.ctimestamp1 alltypesorc.ctimestamp2 alltypesorc.cboolean1 alltypesorc.cboolean2 test.a test.b -51 NULL 199408978 -1800989684 -51.0 NULL 34N4EY63M1GFWuW0boW P4PL5h1eXR4mMLr2 1969-12-31 16:00:08.451 NULL false true 199408978 {1:"val_1",2:"val_2"} -PREHOOK: query: CREATE TABLE test2a (a ARRAY) STORED AS ORC +PREHOOK: query: CREATE TABLE test2a (a ARRAY, index INT) STORED AS ORC PREHOOK: type: CREATETABLE PREHOOK: Output: database:default PREHOOK: Output: default@test2a -POSTHOOK: query: CREATE TABLE test2a (a ARRAY) STORED AS ORC +POSTHOOK: query: CREATE TABLE test2a (a ARRAY, index INT) STORED AS ORC POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default POSTHOOK: Output: default@test2a -PREHOOK: query: INSERT OVERWRITE TABLE test2a SELECT ARRAY(1, 2) FROM src LIMIT 1 +PREHOOK: query: INSERT OVERWRITE TABLE test2a SELECT ARRAY(1, 2), 1 FROM src LIMIT 1 PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@test2a -POSTHOOK: query: INSERT OVERWRITE TABLE test2a SELECT ARRAY(1, 2) FROM src LIMIT 1 +POSTHOOK: query: INSERT OVERWRITE TABLE test2a SELECT ARRAY(1, 2), 1 FROM src LIMIT 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@test2a POSTHOOK: Lineage: test2a.a EXPRESSION [] -_c0 +POSTHOOK: Lineage: test2a.index SIMPLE [] +_c0 _c1 PREHOOK: query: CREATE TABLE test2b (a INT) STORED AS ORC PREHOOK: type: CREATETABLE PREHOOK: Output: database:default @@ -209,35 +210,57 @@ STAGE PLANS: Map Operator Tree: TableScan alias: test2a - Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true Filter Operator + Filter Vectorization: + className: VectorFilterOperator + native: true + predicateExpression: SelectColumnIsNotNull(col 3:int)(children: ListIndexColScalar(col 0:array, col 1:int) -> 3:int) predicate: a[1] is not null (type: boolean) - Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Inner Join 0 to 1 keys: 0 a (type: int) 1 a[1] 
(type: int) - outputColumnNames: _col0, _col4 + Map Join Vectorization: + className: VectorMapJoinOperator + native: false + nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false + outputColumnNames: _col0, _col4, _col5 Statistics: Num rows: 3 Data size: 13 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: _col0 (type: int), _col4 (type: array) - outputColumnNames: _col0, _col1 + expressions: _col0 (type: int), _col4 (type: array), _col5 (type: int) + outputColumnNames: _col0, _col1, _col2 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1, 2] Statistics: Num rows: 3 Data size: 13 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false Statistics: Num rows: 3 Data size: 13 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [] + featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - notVectorizedReason: FILTER operator: Unexpected hive type name array - vectorized: false + allNative: false + usesVectorUDFAdaptor: false + vectorized: true Local Work: Map Reduce Local Work @@ -257,5 +280,118 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@test2a POSTHOOK: Input: default@test2b #### A masked pattern was here #### -test2b.a test2a.a -2 [1,2] +test2b.a test2a.a test2a.index +2 [1,2] 1 +PREHOOK: query: explain vectorization expression +select * from test2b join test2a on test2b.a = test2a.a[test2a.index] +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization expression +select * from test2b join test2a on test2b.a = test2a.a[test2a.index] +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-4 is a root stage + Stage-3 depends on stages: Stage-4 + Stage-0 depends on stages: Stage-3 + +STAGE PLANS: + Stage: Stage-4 + Map Reduce Local Work + Alias -> Map Local Tables: + test2b + Fetch Operator + limit: -1 + Alias -> Map Local Operator Tree: + test2b + TableScan + alias: test2b + Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: a is not null (type: boolean) + Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE + HashTable Sink Operator + keys: + 0 a (type: int) + 1 a[index] (type: int) + + Stage: Stage-3 + Map Reduce + Map Operator Tree: + TableScan + alias: test2a + Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Filter Operator + Filter Vectorization: + className: VectorFilterOperator + native: true + predicateExpression: SelectColumnIsNotNull(col 3:int)(children: ListIndexColColumn(col 0:array, col 1:int) -> 3:int) + predicate: a[index] is not null (type: boolean) + Statistics: Num 
rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: NONE + Map Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 a (type: int) + 1 a[index] (type: int) + Map Join Vectorization: + className: VectorMapJoinOperator + native: false + nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false + outputColumnNames: _col0, _col4, _col5 + Statistics: Num rows: 3 Data size: 13 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: _col0 (type: int), _col4 (type: array), _col5 (type: int) + outputColumnNames: _col0, _col1, _col2 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1, 2] + Statistics: Num rows: 3 Data size: 13 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 3 Data size: 13 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [] + featureSupportInUse: [] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Local Work: + Map Reduce Local Work + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select * from test2b join test2a on test2b.a = test2a.a[test2a.index] +PREHOOK: type: QUERY +PREHOOK: Input: default@test2a +PREHOOK: Input: default@test2b +#### A masked pattern was here #### +POSTHOOK: query: select * from test2b join test2a on test2b.a = test2a.a[test2a.index] +POSTHOOK: type: QUERY +POSTHOOK: Input: default@test2a +POSTHOOK: Input: default@test2b +#### A masked pattern was here #### +test2b.a test2a.a test2a.index +2 [1,2] 1
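
A minimal HiveQL sketch of the scenario these new golden files cover, for orientation only: joining on an array element selected per row by another column, which the vectorizer plans as ListIndexColColumn (column index) alongside the existing ListIndexColScalar (constant index). Table and column names (test2a, test2b, a, index, src) are taken from the q.out above; the session setting, the ARRAY<INT> element type (inferred from the int output column in the plans), and the INSERT used to populate test2b are assumptions, not copied from the actual .q files.

-- Illustrative sketch; assumes the standard src test table exists and that
-- vectorization settings may differ between the mr and llap/tez drivers.
SET hive.vectorized.execution.enabled=true;

CREATE TABLE test2a (a ARRAY<INT>, index INT) STORED AS ORC;
CREATE TABLE test2b (a INT) STORED AS ORC;

INSERT OVERWRITE TABLE test2a SELECT ARRAY(1, 2), 1 FROM src LIMIT 1;
-- Assumed population; the q.out above only shows test2b holding 3 small rows.
INSERT INTO TABLE test2b VALUES (2), (3), (4);

-- Constant index: the filter/key expressions use ListIndexColScalar over column a.
EXPLAIN VECTORIZATION EXPRESSION
SELECT * FROM test2b JOIN test2a ON test2b.a = test2a.a[1];

-- Column index: the filter/key expressions use ListIndexColColumn(a, index).
EXPLAIN VECTORIZATION EXPRESSION
SELECT * FROM test2b JOIN test2a ON test2b.a = test2a.a[test2a.index];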