diff --git common/src/java/org/apache/hadoop/hive/conf/HiveConf.java common/src/java/org/apache/hadoop/hive/conf/HiveConf.java index e540d02..a0952e1 100644 --- common/src/java/org/apache/hadoop/hive/conf/HiveConf.java +++ common/src/java/org/apache/hadoop/hive/conf/HiveConf.java @@ -3449,6 +3449,23 @@ private static void populateLlapDaemonVarsSet(Set llapDaemonVarsSetLocal "Exceeding this will trigger a flush irrespective of the memory pressure condition."), HIVE_VECTORIZATION_GROUPBY_FLUSH_PERCENT("hive.vectorized.groupby.flush.percent", (float) 0.1, "Percent of entries in the group by aggregation hash flushed when the memory threshold is exceeded."), + HIVE_VECTORIZATION_GROUPBY_NATIVE_ENABLED( + "hive.vectorized.execution.groupby.native.enabled", true, + "This flag should be set to true to enable the native vectorization of queries using GroupBy.\n" + + "The default value is true."), + HIVE_TEST_VECTORIZATION_GROUPBY_NATIVE_OVERRIDE( + "hive.test.vectorized.execution.groupby.native.override", + "none", new StringSet("none", "enable", "disable"), + "internal use only, used to override the hive.vectorized.execution.groupby.native.enabled\n" + + "setting. Using enable will force it on and disable will force it off.\n" + + "The default none leaves the setting unchanged.", + true), + HIVE_TEST_VECTORIZATION_GROUPBY_NATIVE_MAX_MEMORY_AVAILABLE( + "hive.test.vectorized.groupby.native.max.memory.available", -1, + "internal use only, used for creating different vectorized hash table sizes\n" + + "to exercise more logic.\n" + + "The default value is -1, which means it is not used.", + true), HIVE_VECTORIZATION_REDUCESINK_NEW_ENABLED("hive.vectorized.execution.reducesink.new.enabled", true, "This flag should be set to true to enable the new vectorization\n" + "of queries using ReduceSink.\n" + diff --git data/files/groupby_long_1a.txt data/files/groupby_long_1a.txt new file mode 100644 index 0000000..8cf831f --- /dev/null +++ data/files/groupby_long_1a.txt @@ -0,0 +1,11 @@ +-5310365297525168078 +-6187919478609154811 +968819023021777205 +3313583664488247651 +-5206670856103795573 +\N +-6187919478609154811 +1569543799237464101 +-6187919478609154811 +-8460550397108077433 +-6187919478609154811 diff --git data/files/groupby_long_1a_nonull.txt data/files/groupby_long_1a_nonull.txt new file mode 100644 index 0000000..b2325ad --- /dev/null +++ data/files/groupby_long_1a_nonull.txt @@ -0,0 +1,10 @@ +1569543799237464101 +-6187919478609154811 +968819023021777205 +-8460550397108077433 +-6187919478609154811 +-5310365297525168078 +-6187919478609154811 +-5206670856103795573 +3313583664488247651 +-6187919478609154811 diff --git data/files/groupby_long_1b.txt data/files/groupby_long_1b.txt new file mode 100644 index 0000000..87c2b3c --- /dev/null +++ data/files/groupby_long_1b.txt @@ -0,0 +1,13 @@ +\N +31713 +31713 +31713 +31713 +32030 +31713 +-25394 +31713 +31713 +31713 +31713 +31713 diff --git data/files/groupby_long_1b_nonull.txt data/files/groupby_long_1b_nonull.txt new file mode 100644 index 0000000..0b438a2 --- /dev/null +++ data/files/groupby_long_1b_nonull.txt @@ -0,0 +1,12 @@ +31713 +31713 +31713 +31713 +32030 +31713 +-25394 +31713 +31713 +31713 +31713 +31713 diff --git data/files/groupby_long_1c.txt data/files/groupby_long_1c.txt new file mode 100644 index 0000000..2d13c26 --- /dev/null +++ data/files/groupby_long_1c.txt @@ -0,0 +1,11 @@ +1928928239,\N +-1437463633,YYXPPCH +-1437463633,TKTKGVGFW +1725068083,MKSCCE +1928928239,\N +\N,ABBZ +1928928239,AMKTIWQ +-1437463633,JU +1928928239,VAQHVRI +-1437463633,SOWDWMS
+-1437463633,\N diff --git data/files/groupby_long_1c_nonull.txt data/files/groupby_long_1c_nonull.txt new file mode 100644 index 0000000..f6bc6e8 --- /dev/null +++ data/files/groupby_long_1c_nonull.txt @@ -0,0 +1,10 @@ +1928928239,\N +-1437463633,YYXPPCH +-1437463633,TKTKGVGFW +1725068083,MKSCCE +1928928239,\N +1928928239,AMKTIWQ +-1437463633,JU +1928928239,VAQHVRI +-1437463633,SOWDWMS +-1437463633,\N diff --git data/files/groupby_serialize_1a.txt data/files/groupby_serialize_1a.txt new file mode 100644 index 0000000..cae1ecc --- /dev/null +++ data/files/groupby_serialize_1a.txt @@ -0,0 +1,17 @@ +2061-12-19 22:10:32.000628309 +\N +2686-05-23 07:46:46.565832918 +2082-07-14 04:00:40.695380469 +2188-06-04 15:03:14.963259704 +2608-02-23 23:44:02.546440891 +2093-04-10 23:36:54.846 +2898-10-01 22:27:02.000871113 +2306-06-21 11:02:00.143124239 +\N +\N +2306-06-21 11:02:00.143124239 +2093-04-10 23:36:54.846 +\N +2686-05-23 07:46:46.565832918 +2093-04-10 23:36:54.846 +2299-11-15 16:41:30.401 diff --git data/files/groupby_serialize_1a_nonull.txt data/files/groupby_serialize_1a_nonull.txt new file mode 100644 index 0000000..0520a9a --- /dev/null +++ data/files/groupby_serialize_1a_nonull.txt @@ -0,0 +1,13 @@ +2061-12-19 22:10:32.000628309 +2686-05-23 07:46:46.565832918 +2082-07-14 04:00:40.695380469 +2188-06-04 15:03:14.963259704 +2608-02-23 23:44:02.546440891 +2093-04-10 23:36:54.846 +2898-10-01 22:27:02.000871113 +2306-06-21 11:02:00.143124239 +2306-06-21 11:02:00.143124239 +2093-04-10 23:36:54.846 +2686-05-23 07:46:46.565832918 +2093-04-10 23:36:54.846 +2299-11-15 16:41:30.401 diff --git data/files/groupby_serialize_1b.txt data/files/groupby_serialize_1b.txt new file mode 100644 index 0000000..c47bae0 --- /dev/null +++ data/files/groupby_serialize_1b.txt @@ -0,0 +1,47 @@ +2304-12-15 15:31:16,11101,YJCKKCR,-0.2 +2018-11-25 22:27:55.84,-12202,VBDBM,7506645.9537 +1957-03-06 09:57:31,-26373,NXLNNSO,2 +2332-06-14 07:02:42.32,-26373,XFFFDTQ,56845106806308.9 +2535-03-01 05:04:49.000525883,23663,ALIQKNXHE,-0.1665691 +2629-04-07 01:54:11,-6776,WGGFVFTW,6.8012851708 +2266-09-26 06:27:29.000284762,20223,EDYJJN,14 +2969-01-23 14:08:04.000667259,-18138,VDPN,8924831210.42768019 +2861-05-27 07:13:01.000848622,-19598,WKPXNLXS,29399 +2301-06-03 17:16:19,15332,ZVEUKC,0.5 +1980-09-13 19:57:15,\N,M,57650.7723 +2304-12-15 15:31:16,1301,T,-0.8 +2461-03-09 09:54:45.000982385,-16454,ZSMB,-991.43605 +2044-05-02 07:00:03.35,-8751,ZSMB,-453797242.029791752 +2409-09-23 10:33:27,2638,XSXR,-9926693851 +1941-10-16 02:19:36.000423663,-24459,AO,-821445414.4579712 +2512-10-06 03:03:03,-3465,VZQ,-49.51219 +2971-02-14 09:13:19,-16605,BVACIRP,-5.751278023 +2075-10-25 20:32:40.000792874,\N,\N,226612651968.36076 +2073-03-21 15:32:57.617920888,26425,MPRACIRYW,5 +2969-01-23 14:08:04.000667259,14500,WXLTRFQP,-23.8198 +2898-12-18 03:37:17,-24459,MHNBXPBM,14.23669356238481 +\N,\N,\N,-2207.3 +2391-01-17 15:28:37.00045143,16160,ZVEUKC,771355639420297.133 +2309-01-15 12:43:49,22821,ZMY,40.9 +2340-12-15 05:15:17.133588982,23663,HHTP,33383.8 +2969-01-23 14:08:04.000667259,-8913,UIMQ,9.178 +2145-10-15 06:58:42.831,2638,\N,-9784.82 +2888-05-08 08:36:55.182302102,5786,ZVEUKC,-56082455.033918 +2467-05-11 06:04:13.426693647,23196,EIBSDASR,-8.5548883801 +2829-06-04 08:01:47.836,22771,ZVEUKC,94317.75318 +2938-12-21 23:35:59.498,29362,ZMY,0.88 +2304-12-15 15:31:16,-13125,JFYW,6.086657 +2808-07-09 02:10:11.928498854,-19598,FHFX,0.3 +2083-06-07 09:35:19.383,-26373,MR,-394.0867 +2686-05-23 07:46:46.565832918,13212,NCYBDW,-917116793.4 +2969-01-23 
14:08:04.000667259,-8913,UIMQ,-375994644577.315257 +2338-02-12 09:30:07,20223,CTH,-6154.763054 +2629-04-07 01:54:11,-6776,WGGFVFTW,41.77451507786646 +2242-08-04 07:51:46.905,20223,UCYXACQ,37.7288 +2637-03-12 22:25:46.385,-12923,PPTJPFR,5.4 +2304-12-15 15:31:16,8650,RLNO,0.71351747335 +2688-02-06 20:58:42.000947837,20223,PAIY,67661.735 +\N,\N,\N,-2.4 +2512-10-06 03:03:03,-3465,VZQ,0.4458 +2960-04-12 07:03:42.000366651,20340,CYZYUNSF,-96.3 +2461-03-09 09:54:45.000982385,-16454,ZSMB,-9575827.55396 \ No newline at end of file diff --git data/files/groupby_serialize_1b_nonull.txt data/files/groupby_serialize_1b_nonull.txt new file mode 100644 index 0000000..e640b42 --- /dev/null +++ data/files/groupby_serialize_1b_nonull.txt @@ -0,0 +1,66 @@ +2304-12-15 15:31:16,11101,YJCKKCR,-0.2 +2018-11-25 22:27:55.84,-12202,VBDBM,7506645.9537 +1957-03-06 09:57:31,-26373,NXLNNSO,2 +2332-06-14 07:02:42.32,-26373,XFFFDTQ,56845106806308.9 +2535-03-01 05:04:49.000525883,23663,ALIQKNXHE,-0.1665691 +2629-04-07 01:54:11,-6776,WGGFVFTW,6.8012851708 +2266-09-26 06:27:29.000284762,20223,EDYJJN,14 +2969-01-23 14:08:04.000667259,-18138,VDPN,8924831210.42768019 +2861-05-27 07:13:01.000848622,-19598,WKPXNLXS,29399 +2301-06-03 17:16:19,15332,ZVEUKC,0.5 +1980-09-13 19:57:15,\N,M,57650.7723 +2304-12-15 15:31:16,1301,T,-0.8 +2461-03-09 09:54:45.000982385,-16454,ZSMB,-991.43605 +2044-05-02 07:00:03.35,-8751,ZSMB,-453797242.029791752 +2409-09-23 10:33:27,2638,XSXR,-9926693851 +1941-10-16 02:19:36.000423663,-24459,AO,-821445414.4579712 +2512-10-06 03:03:03,-3465,VZQ,-49.51219 +2971-02-14 09:13:19,-16605,BVACIRP,-5.751278023 +2075-10-25 20:32:40.000792874,\N,\N,226612651968.36076 +2073-03-21 15:32:57.617920888,26425,MPRACIRYW,5 +2969-01-23 14:08:04.000667259,14500,WXLTRFQP,-23.8198 +2898-12-18 03:37:17,-24459,MHNBXPBM,14.23669356238481 +2391-01-17 15:28:37.00045143,16160,ZVEUKC,771355639420297.133 +2309-01-15 12:43:49,22821,ZMY,40.9 +2340-12-15 05:15:17.133588982,23663,HHTP,33383.8 +2969-01-23 14:08:04.000667259,-8913,UIMQ,9.178 +2145-10-15 06:58:42.831,2638,\N,-9784.82 +2888-05-08 08:36:55.182302102,5786,ZVEUKC,-56082455.033918 +2467-05-11 06:04:13.426693647,23196,EIBSDASR,-8.5548883801 +2829-06-04 08:01:47.836,22771,ZVEUKC,94317.75318 +2938-12-21 23:35:59.498,29362,ZMY,0.88 +2304-12-15 15:31:16,-13125,JFYW,6.086657 +2808-07-09 02:10:11.928498854,-19598,FHFX,0.3 +2083-06-07 09:35:19.383,-26373,MR,-394.0867 +2686-05-23 07:46:46.565832918,13212,NCYBDW,-917116793.4 +2969-01-23 14:08:04.000667259,-8913,UIMQ,-375994644577.315257 +2338-02-12 09:30:07,20223,CTH,-6154.763054 +2629-04-07 01:54:11,-6776,WGGFVFTW,41.77451507786646 +2242-08-04 07:51:46.905,20223,UCYXACQ,37.7288 +2637-03-12 22:25:46.385,-12923,PPTJPFR,5.4 +2304-12-15 15:31:16,8650,RLNO,0.71351747335 +2688-02-06 20:58:42.000947837,20223,PAIY,67661.735 +2512-10-06 03:03:03,-3465,VZQ,0.4458 +2960-04-12 07:03:42.000366651,20340,CYZYUNSF,-96.3 +2461-03-09 09:54:45.000982385,-16454,ZSMB,-9575827.55396 +2512-10-06 03:03:03,1560,X,-922.6951584107 +2396-04-06 15:39:02.404013577,29661,ZSMB,0.76718326 +2409-09-23 10:33:27,2638,XSXR,0.4 +2969-01-23 14:08:04.000667259,6689,TFGVOGPJF,-0.01 +2333-07-28 09:59:26,23196,RKSK,37872288434740893.5 +2409-09-23 10:33:27,2638,XSXR,-162.95 +2357-05-08 07:09:09.000482799,6226,ZSMB,-472 +2304-12-15 15:31:16,15090,G,-4319470286240016.3 +2304-12-15 15:31:16,1301,T,61.302 +2105-01-04 16:27:45,23100,ZSMB,-83.2328 +2242-08-04 07:51:46.905,20223,UCYXACQ,-0.26149 +2637-03-12 22:25:46.385,-17786,HYEGQ,-84.169614329419 +1931-12-04 
11:13:47.269597392,23196,HVJCQMTQL,-9697532.8994 +2897-08-10 15:21:47.09,23663,XYUVBED,6370 +2888-05-08 08:36:55.182302102,5786,ZVEUKC,57.62175257788037 +2145-10-15 06:58:42.831,2638,UANGISEXR,-5996.306 +2462-12-16 23:11:32.633305644,-26373,CB,67.41799 +2396-04-06 15:39:02.404013577,29661,ZSMB,-5151598.347 +2304-12-15 15:31:16,15090,G,975 +2512-10-06 03:03:03,32099,ARNZ,-0.41 +2188-06-04 15:03:14.963259704,9468,AAA,2.75496352 \ No newline at end of file diff --git data/files/groupby_string_1a.txt data/files/groupby_string_1a.txt new file mode 100644 index 0000000..1cbcd05 --- /dev/null +++ data/files/groupby_string_1a.txt @@ -0,0 +1,13 @@ +FTWURVH +QNCYBDW +UA +WXHJ +\N +WXHJ +PXLD +WXHJ +PXLD +WXHJ +WXHJ +MXGDMBD +PXLD diff --git data/files/groupby_string_1a_nonull.txt data/files/groupby_string_1a_nonull.txt new file mode 100644 index 0000000..a6566f2 --- /dev/null +++ data/files/groupby_string_1a_nonull.txt @@ -0,0 +1,12 @@ +WXHJ +WXHJ +FTWURVH +MXGDMBD +UA +WXHJ +QNCYBDW +PXLD +PXLD +WXHJ +PXLD +WXHJ diff --git data/files/groupby_string_1c.txt data/files/groupby_string_1c.txt new file mode 100644 index 0000000..f223da0 --- /dev/null +++ data/files/groupby_string_1c.txt @@ -0,0 +1,38 @@ +BDBMW,2278-04-27,2101-02-21 08:53:34.692 +FROPIK,2023-02-28,2467-05-11 06:04:13.426693647 +GOYJHW,1976-03-06,2805-07-10 10:51:57.00083302 +MXGDMBD,1880-11-01,2765-10-06 13:28:17.000688592 +CQMTQLI,2031-09-13,1927-02-13 08:39:25.000919094 +,1985-01-22,2111-01-10 15:44:28 +IOQIDQBHU,2198-02-08,2073-03-21 15:32:57.617920888 +GSJPSIYOU,1948-07-17,2006-09-24 16:01:24.000239251 +\N,1865-11-08,2893-04-07 07:36:12 +BEP,2206-08-10,2331-10-09 10:59:51 +NADANUQMW,2037-10-19,2320-04-26 18:50:25.000426922 +\N,2250-04-22,2548-03-21 08:23:13.133573801 +ATZJTPECF,1829-10-16,2357-05-08 07:09:09.000482799 +IWEZJHKE,\N,\N +AARNZRVZQ,2002-10-23,2525-05-12 15:59:35 +BEP,2141-02-19,2521-06-09 01:20:07.121 +AARNZRVZQ,2000-11-13,2309-06-05 19:54:13 +LOTLS,1957-11-09,2092-06-07 06:42:30.000538454 +FROPIK,2124-10-01,2974-07-06 12:05:08.000146048 +KL,1980-09-22,2073-08-25 11:51:10.318 +\N,1915-02-22,2554-10-27 09:34:30 +WNGFTTY,1843-06-10,2411-01-28 20:03:59 +VNRXWQ,1883-02-06,2287-07-17 16:46:58.287 +QTSRKSKB,2144-01-13,2627-12-20 03:38:53.000389266 +GOYJHW,1959-04-27,\N +LOTLS,2099-08-04,2181-01-25 01:04:25.000030055 +CQMTQLI,2090-11-13,2693-03-17 16:19:55.82 +VNRXWQ,2276-11-16,2072-08-16 17:45:47.48349887 +LOTLS,2126-09-16,1977-12-15 15:28:56 +FTWURVH,1976-03-10,2683-11-22 13:07:04.66673556 +,2021-02-21,2802-04-21 18:48:18.5933838 +ZNOUDCR,\N,1988-04-23 08:40:21 +FROPIK,2214-02-09,1949-08-18 17:14:38.000703738 +SDA,2196-04-12,2462-10-26 19:28:12.733 +WNGFTTY,2251-08-16,2649-12-21 18:30:42.498 +GOYJHW,1993-04-07,1950-05-04 09:28:22.000114784 +FYW,1807-03-20,2305-08-17 01:32:44 +ATZJTPECF,2217-10-22,2808-10-20 16:01:24.558 diff --git data/files/groupby_string_1c_nonull.txt data/files/groupby_string_1c_nonull.txt new file mode 100644 index 0000000..6b97ef4 --- /dev/null +++ data/files/groupby_string_1c_nonull.txt @@ -0,0 +1,35 @@ +LOTLS,2126-09-16,1977-12-15 15:28:56 +MXGDMBD,1880-11-01,2765-10-06 13:28:17.000688592 +WNGFTTY,2251-08-16,2649-12-21 18:30:42.498 +QTSRKSKB,2144-01-13,2627-12-20 03:38:53.000389266 +AARNZRVZQ,2002-10-23,2525-05-12 15:59:35 +BEP,2141-02-19,2521-06-09 01:20:07.121 +ZNOUDCR,\N,1988-04-23 08:40:21 +FROPIK,2023-02-28,2467-05-11 06:04:13.426693647 +GOYJHW,1993-04-07,1950-05-04 09:28:22.000114784 +CQMTQLI,2090-11-13,2693-03-17 16:19:55.82 +BDBMW,2278-04-27,2101-02-21 08:53:34.692 
+AARNZRVZQ,2000-11-13,2309-06-05 19:54:13 +FYW,1807-03-20,2305-08-17 01:32:44 +,2021-02-21,2802-04-21 18:48:18.5933838 +VNRXWQ,1883-02-06,2287-07-17 16:46:58.287 +FROPIK,2124-10-01,2974-07-06 12:05:08.000146048 +LOTLS,2099-08-04,2181-01-25 01:04:25.000030055 +BEP,2206-08-10,2331-10-09 10:59:51 +WNGFTTY,1843-06-10,2411-01-28 20:03:59 +LOTLS,1957-11-09,2092-06-07 06:42:30.000538454 +CQMTQLI,2031-09-13,1927-02-13 08:39:25.000919094 +GOYJHW,1976-03-06,2805-07-10 10:51:57.00083302 +,1985-01-22,2111-01-10 15:44:28 +SDA,2196-04-12,2462-10-26 19:28:12.733 +ATZJTPECF,1829-10-16,2357-05-08 07:09:09.000482799 +GOYJHW,1959-04-27,\N +FTWURVH,1976-03-10,2683-11-22 13:07:04.66673556 +KL,1980-09-22,2073-08-25 11:51:10.318 +ATZJTPECF,2217-10-22,2808-10-20 16:01:24.558 +NADANUQMW,2037-10-19,2320-04-26 18:50:25.000426922 +FROPIK,2214-02-09,1949-08-18 17:14:38.000703738 +IWEZJHKE,\N,\N +GSJPSIYOU,1948-07-17,2006-09-24 16:01:24.000239251 +IOQIDQBHU,2198-02-08,2073-03-21 15:32:57.617920888 +VNRXWQ,2276-11-16,2072-08-16 17:45:47.48349887 diff --git ql/pom.xml ql/pom.xml index 165610f..ed0dc8a 100644 --- ql/pom.xml +++ ql/pom.xml @@ -835,6 +835,7 @@ classpath="${compile.classpath}"/> + diff --git ql/src/gen/vectorization/GroupByOperatorTemplates/GroupByHashSingleKeyCommonLines.txt ql/src/gen/vectorization/GroupByOperatorTemplates/GroupByHashSingleKeyCommonLines.txt new file mode 100644 index 0000000..f439fd7 --- /dev/null +++ ql/src/gen/vectorization/GroupByOperatorTemplates/GroupByHashSingleKeyCommonLines.txt @@ -0,0 +1,247 @@ +#COMMENT +#COMMENT These code line snippets are intended to: +#COMMENT 1) Reduce code duplication +#COMMENT 2) Avoid the cost of calling methods or using abstract objects +#COMMENT 3) Avoid having to parameterize methods. +#COMMENT 4) Separate the key variation variables and logic from the common loop logic. +#COMMENT +#COMMENT ******************************************************************************************* +#COMMENT Helpful variables for accessing the key values for the LONG and STRING variations. +#COMMENT (None needed for SERIALIZE) +#COMMENT +#BEGIN_LINES KEY_VECTOR_VARIABLES +#IF LONG_KEY + long[] keyVector = keyColVector.vector; +#ENDIF LONG_KEY +#IF STRING_KEY + final byte[][] keyVector = keyColVector.vector; + final int[] keyStart = keyColVector.start; + final int[] keyLength = keyColVector.length; +#ENDIF STRING_KEY +#END_LINES +#COMMENT +#COMMENT ******************************************************************************************* +#COMMENT Current key values for logical (i.e. selectedInUse) and the batch's keys have no +#COMMENT NULLs case. All variations. +#COMMENT +#BEGIN_LINES LOGICAL_NO_NULLS_CURRENT_KEY_VARIABLES + final int firstBatchIndex = selected[0]; +#IF LONG_KEY + long currentKey = keyVector[firstBatchIndex]; +#ENDIF LONG_KEY +#IF STRING_KEY + byte[] currentKey = keyVector[firstBatchIndex]; + int currentKeyStart = keyStart[firstBatchIndex]; + int currentKeyLength = keyLength[firstBatchIndex]; +#ENDIF STRING_KEY +#IF SERIALIZE_KEY + keyVectorSerializeWrite.setOutput(currentKeyOutput); + keyVectorSerializeWrite.serializeWrite(batch, firstBatchIndex); + byte[] currentKey = currentKeyOutput.getData(); + int currentKeyLength = currentKeyOutput.getLength(); +#ENDIF SERIALIZE_KEY +#END_LINES +#COMMENT +#COMMENT ******************************************************************************************* +#COMMENT Current key values for logical (i.e. selectedInUse) and the batch's keys may have +#COMMENT NULLs case. All variations.
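+#COMMENT A NULL current key has no storable value, so it is tracked with the currKeyIsNull flag below.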
+#COMMENT +#BEGIN_LINES LOGICAL_NULLS_CURRENT_KEY_VARIABLES + boolean[] keyIsNull = keyColVector.isNull; + boolean currKeyIsNull; + +#IF LONG_KEY + long currentKey; +#ENDIF LONG_KEY +#IF STRING_KEY + byte[] currentKey; + int currentKeyStart; + int currentKeyLength; +#ENDIF STRING_KEY +#IF SERIALIZE_KEY + byte[] currentKey; + int currentKeyLength; +#ENDIF SERIALIZE_KEY + final int firstBatchIndex = selected[0]; + if (keyIsNull[firstBatchIndex]) { + currKeyIsNull = true; + + // Record the NULL key now so a batch that begins with NULL keys is not missed. + haveNullKey = true; +#IF LONG_KEY + currentKey = 0; +#ENDIF LONG_KEY +#IF STRING_KEY + currentKey = null; + currentKeyStart = 0; + currentKeyLength = 0; +#ENDIF STRING_KEY +#IF SERIALIZE_KEY + currentKey = null; + currentKeyLength = 0; +#ENDIF SERIALIZE_KEY + } else { + currKeyIsNull = false; +#IF LONG_KEY + currentKey = keyVector[firstBatchIndex]; +#ENDIF LONG_KEY +#IF STRING_KEY + currentKey = keyVector[firstBatchIndex]; + currentKeyStart = keyStart[firstBatchIndex]; + currentKeyLength = keyLength[firstBatchIndex]; +#ENDIF STRING_KEY +#IF SERIALIZE_KEY + keyVectorSerializeWrite.setOutput(currentKeyOutput); + keyVectorSerializeWrite.serializeWrite(batch, firstBatchIndex); + currentKey = currentKeyOutput.getData(); + currentKeyLength = currentKeyOutput.getLength(); +#ENDIF SERIALIZE_KEY + } +#END_LINES +#COMMENT +#COMMENT ******************************************************************************************* +#COMMENT Current key values for physical (i.e. NOT selectedInUse) and the batch's keys have no +#COMMENT NULLs case. All variations. +#COMMENT +#BEGIN_LINES PHYSICAL_NO_NULLS_CURRENT_KEY_VARIABLES +#IF LONG_KEY + long currentKey = keyVector[0]; +#ENDIF LONG_KEY +#IF STRING_KEY + byte[] currentKey = keyVector[0]; + int currentKeyStart = keyStart[0]; + int currentKeyLength = keyLength[0]; +#ENDIF STRING_KEY +#IF SERIALIZE_KEY + keyVectorSerializeWrite.setOutput(currentKeyOutput); + keyVectorSerializeWrite.serializeWrite(batch, 0); + byte[] currentKey = currentKeyOutput.getData(); + int currentKeyLength = currentKeyOutput.getLength(); +#ENDIF SERIALIZE_KEY +#END_LINES +#COMMENT +#COMMENT ******************************************************************************************* +#COMMENT Current key values for physical (i.e. NOT selectedInUse) and the batch's keys may have +#COMMENT NULLs case. All variations.
+#COMMENT +#BEGIN_LINES PHYSICAL_NULLS_CURRENT_KEY_VARIABLES + boolean[] keyIsNull = keyColVector.isNull; + boolean currKeyIsNull; + +#IF LONG_KEY + long currentKey; +#ENDIF LONG_KEY +#IF STRING_KEY + byte[] currentKey; + int currentKeyStart; + int currentKeyLength; +#ENDIF STRING_KEY +#IF SERIALIZE_KEY + byte[] currentKey; + int currentKeyLength; +#ENDIF SERIALIZE_KEY + if (keyIsNull[0]) { + currKeyIsNull = true; + + // Record the NULL key now so a batch that begins with NULL keys is not missed. + haveNullKey = true; +#IF LONG_KEY + currentKey = 0; +#ENDIF LONG_KEY +#IF STRING_KEY + currentKey = null; + currentKeyStart = 0; + currentKeyLength = 0; +#ENDIF STRING_KEY +#IF SERIALIZE_KEY + currentKey = null; + currentKeyLength = 0; +#ENDIF SERIALIZE_KEY + } else { + currKeyIsNull = false; +#IF LONG_KEY + currentKey = keyVector[0]; +#ENDIF LONG_KEY +#IF STRING_KEY + currentKey = keyVector[0]; + currentKeyStart = keyStart[0]; + currentKeyLength = keyLength[0]; +#ENDIF STRING_KEY +#IF SERIALIZE_KEY + keyVectorSerializeWrite.setOutput(currentKeyOutput); + keyVectorSerializeWrite.serializeWrite(batch, 0); + currentKey = currentKeyOutput.getData(); + currentKeyLength = currentKeyOutput.getLength(); +#ENDIF SERIALIZE_KEY + } +#END_LINES +#COMMENT +#COMMENT ******************************************************************************************* +#COMMENT Get next key value at batchIndex. +#COMMENT All variations. +#COMMENT +#BEGIN_LINES GET_NEXT_KEY +#IF LONG_KEY + final long nextKey = keyVector[batchIndex]; +#ENDIF LONG_KEY +#IF STRING_KEY + byte[] nextKey = keyVector[batchIndex]; + final int nextKeyStart = keyStart[batchIndex]; + final int nextKeyLength = keyLength[batchIndex]; +#ENDIF STRING_KEY +#IF SERIALIZE_KEY + keyVectorSerializeWrite.setOutput(nextKeyOutput); + keyVectorSerializeWrite.serializeWrite(batch, batchIndex); + final byte[] nextKey = nextKeyOutput.getData(); + final int nextKeyLength = nextKeyOutput.getLength(); +#ENDIF SERIALIZE_KEY +#END_LINES +#COMMENT +#COMMENT ******************************************************************************************* +#COMMENT IF statement next key value equals current key value for all variations. +#COMMENT +#BEGIN_LINES IF_NEXT_EQUALS_CURRENT +#IF LONG_KEY + if (currentKey == nextKey) { +#ENDIF LONG_KEY +#IF STRING_KEY + if (StringExpr.equal( + currentKey, currentKeyStart, currentKeyLength, + nextKey, nextKeyStart, nextKeyLength)) { +#ENDIF STRING_KEY +#IF SERIALIZE_KEY + if (StringExpr.equal( + currentKey, 0, currentKeyLength, + nextKey, 0, nextKeyLength)) { +#ENDIF SERIALIZE_KEY +#END_LINES +#COMMENT +#COMMENT ******************************************************************************************* +#COMMENT ELSE IF statement next key value equals current key value for all variations. +#COMMENT +#BEGIN_LINES ELSE_IF_NEXT_EQUALS_CURRENT +#IF LONG_KEY + } else if (currentKey == nextKey) { +#ENDIF LONG_KEY +#IF STRING_KEY + } else if (StringExpr.equal( + currentKey, currentKeyStart, currentKeyLength, + nextKey, nextKeyStart, nextKeyLength)) { +#ENDIF STRING_KEY +#IF SERIALIZE_KEY + } else if (StringExpr.equal( + currentKey, 0, currentKeyLength, + nextKey, 0, nextKeyLength)) { +#ENDIF SERIALIZE_KEY +#END_LINES +#COMMENT +#COMMENT ******************************************************************************************* +#COMMENT Remember the next key value as the current key value. All variations.
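+#COMMENT For SERIALIZE_KEY, the current and next Output objects are swapped rather than copying bytes.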
+#COMMENT +#BEGIN_LINES NEW_CURRENT_KEY + currentKey = nextKey; +#IF STRING_KEY + currentKeyStart = nextKeyStart; + currentKeyLength = nextKeyLength; +#ENDIF STRING_KEY +#IF SERIALIZE_KEY + currentKeyLength = nextKeyLength; + final Output tempOutput = nextKeyOutput; + nextKeyOutput = currentKeyOutput; + currentKeyOutput = tempOutput; +#ENDIF SERIALIZE_KEY +#END_LINES \ No newline at end of file diff --git ql/src/gen/vectorization/GroupByOperatorTemplates/GroupByHashSingleKeyDuplicateReductionOperator.txt ql/src/gen/vectorization/GroupByOperatorTemplates/GroupByHashSingleKeyDuplicateReductionOperator.txt new file mode 100644 index 0000000..51b05bb --- /dev/null +++ ql/src/gen/vectorization/GroupByOperatorTemplates/GroupByHashSingleKeyDuplicateReductionOperator.txt @@ -0,0 +1,470 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.exec.vector.groupby.operator.gen; + +import java.io.IOException; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.ql.CompilationOpContext; +import org.apache.hadoop.hive.ql.exec.vector.ColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.; +import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext; +import org.apache.hadoop.hive.ql.exec.vector.VectorizationOperator; +import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; +import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr; +#IF SERIALIZE_KEY +import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; +import org.apache.hadoop.hive.ql.exec.vector.VectorDeserializeRow; +import org.apache.hadoop.hive.ql.exec.vector.VectorSerializeRow; +import org.apache.hadoop.hive.serde2.ByteStream.Output; +import org.apache.hadoop.hive.serde2.binarysortable.fast.BinarySortableSerializeWrite; +#ENDIF SERIALIZE_KEY +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.plan.OperatorDesc; +import org.apache.hadoop.hive.ql.plan.VectorDesc; +import org.apache.hadoop.hive.ql.plan.VectorGroupByDesc; + +import com.google.common.base.Preconditions; + +import org.apache.hadoop.hive.ql.exec.vector.VectorizedBatchUtil; +import org.apache.hive.common.util.HashCodeUtil; + +/* + * Specialized class for doing a Native Vectorized GroupBy with no aggregation. + * + * It is used on a single key for duplicate key reduction. + * + * Final duplicate elimination must be done in reduce-shuffle and a reducer since with hash table + * overflow some duplicates can slip through. And, of course, other vertices may contribute + * the same keys. 
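+ * + * For example, a plan for a query like SELECT key FROM t GROUP BY key (no aggregations) can + * use this operator to reduce duplicate keys before the shuffle.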
+ */ +public class + extends VectorGroupByHashKeyDuplicateReductionOperatorBase { + + private static final long serialVersionUID = 1L; + + // Non-transient members initialized by the constructor. They cannot be final due to Kryo. + + // The above members are initialized by the constructor and must not be + // transient. + //--------------------------------------------------------------------------- + +#IF SERIALIZE_KEY + // Object that can take the single key column in a row of a vectorized row batch and serialize it. + // The key is not NULL. + private transient VectorSerializeRow keyVectorSerializeWrite; + + // The BinarySortable serialization of the current key. + private transient Output currentKeyOutput; + + // The BinarySortable serialization of the next key for a possible series of equal keys. + private transient Output nextKeyOutput; + +#ENDIF SERIALIZE_KEY + //--------------------------------------------------------------------------- + // Pass-thru constructors. + // + + public () { + super(); + } + + public (CompilationOpContext ctx, OperatorDesc conf, + VectorizationContext vContext, VectorDesc vectorDesc) throws HiveException { + super(ctx, conf, vContext, vectorDesc); + } + + @Override + protected void initializeOp(Configuration hconf) throws HiveException { + super.initializeOp(hconf); + +#IF LONG_KEY + allocateBucketArray(LONG_DUPLICATE_REDUCTION_ENTRY_SIZE); +#ELSE + allocateBucketArray(BYTES_DUPLICATE_REDUCTION_ENTRY_SIZE); +#ENDIF LONG_KEY +#IF SERIALIZE_KEY + + keyVectorSerializeWrite = + new VectorSerializeRow( + new BinarySortableSerializeWrite(1)); + TypeInfo[] typeInfos = new TypeInfo[] { groupByKeyExpressions[0].getOutputTypeInfo() }; + int[] columnMap = new int[] { groupByKeyExpressions[0].getOutputColumnNum() }; + keyVectorSerializeWrite.init(typeInfos, columnMap); + + currentKeyOutput = new Output(); + nextKeyOutput = new Output(); +#ENDIF SERIALIZE_KEY + } + +#COMMENT=========================================================================================== +#COMMENT +#COMMENT These code line snippets are intended to: +#COMMENT 1) Reduce code duplication +#COMMENT 2) Avoid the cost of calling methods or using abstract objects +#COMMENT 3) Avoid having to parameterize methods. +#COMMENT 4) Separate the key variation variables and logic from the common loop logic. +#COMMENT +#INCLUDE GroupByHashSingleKeyCommonLines +#COMMENT=========================================================================================== +#COMMENT +#COMMENT ******************************************************************************************* +#COMMENT The current series of equal keys ended -- find or create the hash table entry. +#COMMENT All variations.
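+#COMMENT For LONG_KEY, the key value 0 is reserved as the empty-slot marker, so a zero key is +#COMMENT noted separately with the haveZeroKey flag.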
+#COMMENT +#BEGIN_LINES CURRENT_DUPLICATE_REDUCTION_KEY_ENDED +#IF LONG_KEY + if (currentKey == 0) { + haveZeroKey = true; + } else { + findOrCreateLongDuplicateReductionKey( + currentKey, + HashCodeUtil.calculateLongHashCode(currentKey)); + } +#ENDIF LONG_KEY +#IF STRING_KEY + findOrCreateBytesDuplicateReductionKey( + currentKey, currentKeyStart, currentKeyLength, + HashCodeUtil.calculateBytesHashCode( + currentKey, currentKeyStart, currentKeyLength)); +#ENDIF STRING_KEY +#IF SERIALIZE_KEY + findOrCreateBytesDuplicateReductionKey( + currentKey, 0, currentKeyLength, + HashCodeUtil.calculateBytesHashCode( + currentKey, 0, currentKeyLength)); +#ENDIF SERIALIZE_KEY +#END_LINES +#COMMENT +#COMMENT ******************************************************************************************* +#COMMENT After the key processing loop for a batch of no NULL keys, find or create the hash table +#COMMENT entry. All variations. +#COMMENT +#BEGIN_LINES LAST_NO_NULLS_DUPLICATE_REDUCTION_KEY +#IF LONG_KEY + if (currentKey == 0) { + + // We don't store 0 in the slot table so it can be used to indicate an empty slot. + haveZeroKey = true; + } else { + findOrCreateLongDuplicateReductionKey( + currentKey, + HashCodeUtil.calculateLongHashCode(currentKey)); + } +#ENDIF LONG_KEY +#IF STRING_KEY + findOrCreateBytesDuplicateReductionKey( + currentKey, currentKeyStart, currentKeyLength, + HashCodeUtil.calculateBytesHashCode( + currentKey, currentKeyStart, currentKeyLength)); +#ENDIF STRING_KEY +#IF SERIALIZE_KEY + findOrCreateBytesDuplicateReductionKey( + currentKey, 0, currentKeyLength, + HashCodeUtil.calculateBytesHashCode( + currentKey, 0, currentKeyLength)); +#ENDIF SERIALIZE_KEY +#END_LINES +#COMMENT +#COMMENT ******************************************************************************************* +#COMMENT After the key processing loop for a batch which may have NULL keys, find or create the +#COMMENT hash table entry. All variations. +#COMMENT +#BEGIN_LINES LAST_NULLS_DUPLICATE_REDUCTION_KEY + if (!currKeyIsNull) { +#IF LONG_KEY + if (currentKey == 0) { + + // We don't store 0 in the slot table so it can be used to indicate an empty slot. + haveZeroKey = true; + } else { + findOrCreateLongDuplicateReductionKey( + currentKey, + HashCodeUtil.calculateLongHashCode(currentKey)); + } +#ENDIF LONG_KEY +#IF STRING_KEY + findOrCreateBytesDuplicateReductionKey( + currentKey, currentKeyStart, currentKeyLength, + HashCodeUtil.calculateBytesHashCode( + currentKey, currentKeyStart, currentKeyLength)); +#ENDIF STRING_KEY +#IF SERIALIZE_KEY + findOrCreateBytesDuplicateReductionKey( + currentKey, 0, currentKeyLength, + HashCodeUtil.calculateBytesHashCode( + currentKey, 0, currentKeyLength)); +#ENDIF SERIALIZE_KEY + } +#END_LINES + /* + * Repeating key case -- either all NULL keys or all same non-NULL key. + * + * For the all NULL or all 0 keys case we note NULL/0 key exists. Otherwise, we do the + * find/create. + */ + protected void handleRepeatingKey(VectorizedRowBatch batch, final int inputLogicalSize, + keyColVector) throws HiveException, IOException { + + if (keyColVector.noNulls || !keyColVector.isNull[0]) { +#IF LONG_KEY + final long repeatingKey = keyColVector.vector[0]; + if (repeatingKey == 0) { + + // We don't store 0 in the slot table so it can be used to indicate an empty slot. 
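+ // Instead, remember it with the haveZeroKey flag.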
+ haveZeroKey = true; + } else { + findOrCreateLongDuplicateReductionKey( + repeatingKey, + HashCodeUtil.calculateLongHashCode(repeatingKey)); + } +#ENDIF LONG_KEY +#IF STRING_KEY + final byte[] repeatingKey = keyColVector.vector[0]; + final int repeatingKeyStart = keyColVector.start[0]; + final int repeatingKeyLength = keyColVector.length[0]; + findOrCreateBytesDuplicateReductionKey( + repeatingKey, repeatingKeyStart, repeatingKeyLength, + HashCodeUtil.calculateBytesHashCode( + repeatingKey, repeatingKeyStart, repeatingKeyLength)); +#ENDIF STRING_KEY +#IF SERIALIZE_KEY + keyVectorSerializeWrite.setOutput(currentKeyOutput); + keyVectorSerializeWrite.serializeWrite(batch, 0); + byte[] repeatingKey = currentKeyOutput.getData(); + int repeatingKeyLength = currentKeyOutput.getLength(); + findOrCreateBytesDuplicateReductionKey( + repeatingKey, 0, repeatingKeyLength, + HashCodeUtil.calculateBytesHashCode( + repeatingKey, 0, repeatingKeyLength)); +#ENDIF SERIALIZE_KEY + } else { + + // We note we encountered a repeating NULL key. + haveNullKey = true; + } + } + + /* + * Logical batch processing (i.e. selectedInUse is true since rows were filtered out) for + * NO NULLS key case. + * + * Do find/create on each key. + */ + protected void handleLogicalNoNullsKey(VectorizedRowBatch batch, final int inputLogicalSize, + keyColVector) throws HiveException, IOException { + + int[] selected = batch.selected; + +#USE_LINES KEY_VECTOR_VARIABLES + +#USE_LINES LOGICAL_NO_NULLS_CURRENT_KEY_VARIABLES + + for (int logicalIndex = 1; logicalIndex < inputLogicalSize; logicalIndex++) { + final int batchIndex = selected[logicalIndex]; + +#USE_LINES GET_NEXT_KEY +#USE_LINES IF_NEXT_EQUALS_CURRENT + + // Equal key series. + } else { + + // Current key ended. +#USE_LINES CURRENT_DUPLICATE_REDUCTION_KEY_ENDED + + // New current key. +#USE_LINES NEW_CURRENT_KEY + } + } + // Handle last key. +#USE_LINES LAST_NO_NULLS_DUPLICATE_REDUCTION_KEY + } + + /* + * Logical batch processing (i.e. selectedInUse is true since rows were filtered out) for + * NULLS key case. + * + * For all NULL keys cases we note NULL key exists since we don't represent it in the slot table. + * + * Do find/create on each non-NULL key. + */ + protected void handleLogicalNullsKey(VectorizedRowBatch batch, final int inputLogicalSize, + keyColVector) throws HiveException, IOException { + + int[] selected = batch.selected; + +#USE_LINES KEY_VECTOR_VARIABLES + +#USE_LINES LOGICAL_NULLS_CURRENT_KEY_VARIABLES + + for (int logicalIndex = 1; logicalIndex < inputLogicalSize; logicalIndex++) { + final int batchIndex = selected[logicalIndex]; + + if (keyIsNull[batchIndex]) { + + if (currKeyIsNull) { + + // NULL key series. + } else { + + // Current non-NULL key ended by NULL key. +#USE_LINES CURRENT_DUPLICATE_REDUCTION_KEY_ENDED + + // New NULL key. + currKeyIsNull = true; + + // We note we encountered a NULL key. + haveNullKey = true; + } + + } else { + +#USE_LINES GET_NEXT_KEY + if (currKeyIsNull) { + + // Current NULL key ended. + currKeyIsNull = false; + + // New non-NULL key. +#USE_LINES NEW_CURRENT_KEY +#USE_LINES ELSE_IF_NEXT_EQUALS_CURRENT + + // Equal key series. + } else { + + // Current non-NULL key ended by another non-NULL key. +#USE_LINES CURRENT_DUPLICATE_REDUCTION_KEY_ENDED + + // New non-NULL key. +#USE_LINES NEW_CURRENT_KEY + } + } + } + // Handle last key. +#USE_LINES LAST_NULLS_DUPLICATE_REDUCTION_KEY + } + + /* + * Physical batch processing (i.e. selectedInUse is false since NO rows were filtered out) for + * NO NULLS key case. 
+ * + * (For remaining comments, see handleLogicalNoNullsKey.) + */ + protected void handlePhysicalNoNullsKey(VectorizedRowBatch batch, final int inputLogicalSize, + keyColVector) throws HiveException, IOException { + +#USE_LINES KEY_VECTOR_VARIABLES + +#USE_LINES PHYSICAL_NO_NULLS_CURRENT_KEY_VARIABLES + + for (int batchIndex = 1; batchIndex < inputLogicalSize; batchIndex++) { + +#USE_LINES GET_NEXT_KEY +#USE_LINES IF_NEXT_EQUALS_CURRENT + + // Equal key series. + } else { + + // Current key ended. +#USE_LINES CURRENT_DUPLICATE_REDUCTION_KEY_ENDED + + // New current key. +#USE_LINES NEW_CURRENT_KEY + } + } + // Handle last key. +#USE_LINES LAST_NO_NULLS_DUPLICATE_REDUCTION_KEY + } + + /* + * Physical batch processing (i.e. selectedInUse is false since NO rows were filtered out) for + * NULLS key case. + * + * (For remaining comments, see handleLogicalNullsKey.) + * + */ + protected void handlePhysicalNullsKey(VectorizedRowBatch batch, final int inputLogicalSize, + keyColVector) throws HiveException, IOException { + +#USE_LINES KEY_VECTOR_VARIABLES + +#USE_LINES PHYSICAL_NULLS_CURRENT_KEY_VARIABLES + + for (int batchIndex = 1; batchIndex < inputLogicalSize; batchIndex++) { + + if (keyIsNull[batchIndex]) { + + if (currKeyIsNull) { + + // NULL key series. + } else { + + // Current non-NULL key ended by NULL key. +#USE_LINES CURRENT_DUPLICATE_REDUCTION_KEY_ENDED + + // New NULL key. + currKeyIsNull = true; + + // We note we encountered a NULL key. + haveNullKey = true; + } + + } else { + +#USE_LINES GET_NEXT_KEY + if (currKeyIsNull) { + + // Current NULL key ended by non-NULL key. + currKeyIsNull = false; + + // New non-NULL key. +#USE_LINES NEW_CURRENT_KEY +#USE_LINES ELSE_IF_NEXT_EQUALS_CURRENT + + // Equal key series. + } else { + + // Current non-NULL key ended by non-NULL key. +#USE_LINES CURRENT_DUPLICATE_REDUCTION_KEY_ENDED + + // New non-NULL key. +#USE_LINES NEW_CURRENT_KEY + } + } + } + // Handle last key. +#USE_LINES LAST_NULLS_DUPLICATE_REDUCTION_KEY + } + + @Override + protected void outputSingleKeys( + ColumnVector keyColumnVector) throws HiveException { + +#IF LONG_KEY + doOutputLongKeys((LongColumnVector) keyColumnVector); +#ENDIF LONG_KEY +#IF STRING_KEY + doOutputStringKeys((BytesColumnVector) keyColumnVector); +#ENDIF STRING_KEY +#IF SERIALIZE_KEY + doOutputSerializeKeys(keyColumnVector); +#ENDIF SERIALIZE_KEY + } +} \ No newline at end of file diff --git ql/src/gen/vectorization/GroupByOperatorTemplates/GroupByHashSingleKeyOperatorBase.txt ql/src/gen/vectorization/GroupByOperatorTemplates/GroupByHashSingleKeyOperatorBase.txt new file mode 100644 index 0000000..fdd0ce0 --- /dev/null +++ ql/src/gen/vectorization/GroupByOperatorTemplates/GroupByHashSingleKeyOperatorBase.txt @@ -0,0 +1,132 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.exec.vector.groupby.operator.gen; + +import java.io.IOException; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.ql.CompilationOpContext; +import org.apache.hadoop.hive.ql.exec.vector.; +import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext; +import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; +import org.apache.hadoop.hive.ql.exec.vector.groupby.hash.key..VectorGroupByHashKeyTable; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.plan.OperatorDesc; +import org.apache.hadoop.hive.ql.plan.VectorDesc; + +import com.google.common.base.Preconditions; + +import org.apache.hadoop.hive.ql.exec.vector.VectorizedBatchUtil; + +/* + * A single key operator base optimized for Native Vectorized GroupBy. + */ +public abstract class + extends VectorGroupByHashKeyTable { + + private static final long serialVersionUID = 1L; + + // The above members are initialized by the constructor and must not be + // transient. + //--------------------------------------------------------------------------- + + //--------------------------------------------------------------------------- + // Pass-thru constructors. + // + + public () { + super(); + } + + public (CompilationOpContext ctx, OperatorDesc conf, + VectorizationContext vContext, VectorDesc vectorDesc) throws HiveException { + super(ctx, conf, vContext, vectorDesc); + } + + @Override + protected void initializeOp(Configuration hconf) throws HiveException { + super.initializeOp(hconf); + } + + @Override + protected void doMainLoop(VectorizedRowBatch batch, final int inputLogicalSize) + throws HiveException, IOException { + + keyColVector = () batch.cols[keyColumnNum]; + + // When the key is repeated we want to short-circuit and finish quickly so we don't need + // special repeated key logic later. + if (keyColVector.isRepeating) { + + handleRepeatingKey(batch, inputLogicalSize, keyColVector); + return; + } + + if (batch.selectedInUse) { + + // Map logical to (physical) batch index. + + if (keyColVector.noNulls) { + + // LOGICAL, Key: NO NULLS. + + handleLogicalNoNullsKey(batch, inputLogicalSize, keyColVector); + + } else { + + // LOGICAL, Key: NULLS. + + handleLogicalNullsKey(batch, inputLogicalSize, keyColVector); + } + + } else { + + // NOT selectedInUse. No rows filtered out -- so logical index is the (physical) batch index. + + if (keyColVector.noNulls) { + + // PHYSICAL, Key: NO NULLS. + + handlePhysicalNoNullsKey(batch, inputLogicalSize, keyColVector); + + } else { + + // PHYSICAL, Key: NULLS.
+ + handlePhysicalNullsKey(batch, inputLogicalSize, keyColVector); + } + } + } + + protected abstract void handleRepeatingKey(VectorizedRowBatch batch, final int inputLogicalSize, + keyColVector) throws HiveException, IOException; + + protected abstract void handleLogicalNoNullsKey(VectorizedRowBatch batch, final int inputLogicalSize, + keyColVector) throws HiveException, IOException; + + protected abstract void handleLogicalNullsKey(VectorizedRowBatch batch, final int inputLogicalSize, + keyColVector) throws HiveException, IOException; + + protected abstract void handlePhysicalNoNullsKey(VectorizedRowBatch batch, final int inputLogicalSize, + keyColVector) throws HiveException, IOException; + + protected abstract void handlePhysicalNullsKey(VectorizedRowBatch batch, final int inputLogicalSize, + keyColVector) throws HiveException, IOException; +} \ No newline at end of file diff --git ql/src/gen/vectorization/GroupByOperatorTemplates/GroupByHashSingleKeySingleCountColumnOperator.txt ql/src/gen/vectorization/GroupByOperatorTemplates/GroupByHashSingleKeySingleCountColumnOperator.txt new file mode 100644 index 0000000..3bbbba4 --- /dev/null +++ ql/src/gen/vectorization/GroupByOperatorTemplates/GroupByHashSingleKeySingleCountColumnOperator.txt @@ -0,0 +1,1243 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.ql.exec.vector.groupby.operator.gen; + +import java.io.IOException; +import java.util.ArrayList; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.ql.CompilationOpContext; +import org.apache.hadoop.hive.ql.exec.vector.ColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.; +import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext; +import org.apache.hadoop.hive.ql.exec.vector.VectorizationOperator; +import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; +import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr; +import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; +#IF SERIALIZE_KEY +import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; +import org.apache.hadoop.hive.ql.exec.vector.VectorDeserializeRow; +import org.apache.hadoop.hive.ql.exec.vector.VectorSerializeRow; +import org.apache.hadoop.hive.serde2.ByteStream.Output; +import org.apache.hadoop.hive.serde2.binarysortable.fast.BinarySortableSerializeWrite; +#ENDIF SERIALIZE_KEY +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.plan.OperatorDesc; +import org.apache.hadoop.hive.ql.plan.VectorDesc; +import org.apache.hadoop.hive.ql.plan.VectorGroupByDesc; +import org.apache.hive.common.util.HashCodeUtil; + +/* + * Specialized class for doing a COUNT(non-key-column) Native Vectorized GroupBy. That is, + * the grouping is being done on a single key and the counting + * is for another ("non-key") column (which can be any data type). + * + * We make a single pass. We loop over the key column and process the keys. We look for + * sequences of NULL keys or equal keys. And, at the same time, do any processing for the + * non-key-column counting. + * + * NOTE: Both NULL and non-NULL keys have counts for non-key-columns. So, after counting the + * non-NULL fields for the non-key-column, we always do a hash table find/create even when the count + * is 0 since all those keys must be part of the output result. + + // A key will get created even when there are no non-NULL column values. Count includes 0. + + findOrCreateLongZeroCountKey( + key, + longKeySeries.currentHashCode, + nonNullCount); + + */ +public class + extends VectorGroupByHashKeySingleCountOperatorBase { + + private static final long serialVersionUID = 1L; + + // Non-transient members initialized by the constructor. They cannot be final due to Kryo. + + protected int countColumnNum; + + // The above members are initialized by the constructor and must not be + // transient. + //--------------------------------------------------------------------------- + +#IF SERIALIZE_KEY + // Object that can take the single key column in a row of a vectorized row batch and serialize it. + // The key is not NULL. + private transient VectorSerializeRow keyVectorSerializeWrite; + + // The BinarySortable serialization of the current key. + private transient Output currentKeyOutput; + + // The BinarySortable serialization of the next key for a possible series of equal keys. + private transient Output nextKeyOutput; + +#ENDIF SERIALIZE_KEY + //--------------------------------------------------------------------------- + // Pass-thru constructors.
+ // + + public () { + super(); + } + + public (CompilationOpContext ctx, OperatorDesc conf, + VectorizationContext vContext, VectorDesc vectorDesc) throws HiveException { + super(ctx, conf, vContext, vectorDesc); + + countColumnNum = singleCountAggregation.getCountColumnNum(); + } + + @Override + protected void initializeOp(Configuration hconf) throws HiveException { + super.initializeOp(hconf); + +#IF LONG_KEY + allocateBucketArray(LONG_ZERO_COUNT_ENTRY_SIZE); +#ELSE + allocateBucketArray(BYTES_ENTRY_SIZE); +#ENDIF LONG_KEY +#IF SERIALIZE_KEY + + keyVectorSerializeWrite = + new VectorSerializeRow( + new BinarySortableSerializeWrite(1)); + TypeInfo[] typeInfos = new TypeInfo[] { groupByKeyExpressions[0].getOutputTypeInfo() }; + int[] columnMap = new int[] { groupByKeyExpressions[0].getOutputColumnNum() }; + keyVectorSerializeWrite.init(typeInfos, columnMap); + + currentKeyOutput = new Output(); + nextKeyOutput = new Output(); +#ENDIF SERIALIZE_KEY + } + +#COMMENT=========================================================================================== +#COMMENT +#COMMENT These code line snippets are intended to: +#COMMENT 1) Reduce code duplication +#COMMENT 2) Avoid the cost of calling methods or using abstract objects +#COMMENT 3) Avoid having to parameterize methods. +#COMMENT 4) Separate the key variation variables and logic from the common loop logic. +#COMMENT +#INCLUDE GroupByHashSingleKeyCommonLines +#COMMENT +#COMMENT +#COMMENT ******************************************************************************************* +#COMMENT The current series of equal keys ended -- find or create the hash table entry and +#COMMENT add or initialize it with the count. All variations. +#COMMENT +#BEGIN_LINES CURRENT_COLUMN_COUNT_KEY_ENDED +#IF LONG_KEY + findOrCreateLongZeroCountKey( + currentKey, + HashCodeUtil.calculateLongHashCode(currentKey), + count); +#ENDIF LONG_KEY +#IF STRING_KEY + findOrCreateBytesKey( + currentKey, currentKeyStart, currentKeyLength, + HashCodeUtil.calculateBytesHashCode( + currentKey, currentKeyStart, currentKeyLength), + count); +#ENDIF STRING_KEY +#IF SERIALIZE_KEY + findOrCreateBytesKey( + currentKey, 0, currentKeyLength, + HashCodeUtil.calculateBytesHashCode( + currentKey, 0, currentKeyLength), + count); +#ENDIF SERIALIZE_KEY +#END_LINES +#COMMENT +#COMMENT ******************************************************************************************* +#COMMENT The current series of equal keys ended -- create the hash table entry if necessary; +#COMMENT ignore it if it is already present since the count is 0 in this case. All variations. +#COMMENT +#BEGIN_LINES CURRENT_COLUMN_COUNT_KEY_ENDED_ZERO_COUNT +#IF LONG_KEY + findOrCreateLongZeroCountKey( + currentKey, + HashCodeUtil.calculateLongHashCode(currentKey), + 0); +#ENDIF LONG_KEY +#IF STRING_KEY + findOrCreateBytesKey( + currentKey, currentKeyStart, currentKeyLength, + HashCodeUtil.calculateBytesHashCode( + currentKey, currentKeyStart, currentKeyLength), + 0); +#ENDIF STRING_KEY +#IF SERIALIZE_KEY + findOrCreateBytesKey( + currentKey, 0, currentKeyLength, + HashCodeUtil.calculateBytesHashCode( + currentKey, 0, currentKeyLength), + 0); +#ENDIF SERIALIZE_KEY +#END_LINES +#COMMENT +#COMMENT ******************************************************************************************* +#COMMENT After the key processing loop for a batch of no NULL keys, find or create the hash table +#COMMENT entry and add or initialize it with the count. All variations.
+#COMMENT +#BEGIN_LINES LAST_NO_NULLS_COLUMN_COUNT_KEY +#IF LONG_KEY + findOrCreateLongZeroCountKey( + currentKey, + HashCodeUtil.calculateLongHashCode(currentKey), + count); +#ENDIF LONG_KEY +#IF STRING_KEY + findOrCreateBytesKey( + currentKey, currentKeyStart, currentKeyLength, + HashCodeUtil.calculateBytesHashCode( + currentKey, currentKeyStart, currentKeyLength), + count); +#ENDIF STRING_KEY +#IF SERIALIZE_KEY + findOrCreateBytesKey( + currentKey, 0, currentKeyLength, + HashCodeUtil.calculateBytesHashCode( + currentKey, 0, currentKeyLength), + count); +#ENDIF SERIALIZE_KEY +#END_LINES +#COMMENT +#COMMENT ******************************************************************************************* +#COMMENT After the key processing loop for a batch which may have NULL keys, find or create the +#COMMENT hash table entry and add or initialize it with the count. All variations. +#COMMENT +#BEGIN_LINES LAST_NULLS_COLUMN_COUNT_KEY + if (currKeyIsNull) { + haveNullKey = true; + nullKeyCount += count; + } else { +#IF LONG_KEY + findOrCreateLongZeroCountKey( + currentKey, + HashCodeUtil.calculateLongHashCode(currentKey), + count); +#ENDIF LONG_KEY +#IF STRING_KEY + findOrCreateBytesKey( + currentKey, currentKeyStart, currentKeyLength, + HashCodeUtil.calculateBytesHashCode( + currentKey, currentKeyStart, currentKeyLength), + count); +#ENDIF STRING_KEY +#IF SERIALIZE_KEY + findOrCreateBytesKey( + currentKey, 0, currentKeyLength, + HashCodeUtil.calculateBytesHashCode( + currentKey, 0, currentKeyLength), + count); +#ENDIF SERIALIZE_KEY + } +#END_LINES +#COMMENT +#COMMENT ******************************************************************************************* +#COMMENT After the key processing loop for a batch of no NULL keys, create the hash table +#COMMENT entry if necessary; ignore it if it is already present since the count is 0 in this case. +#COMMENT All variations. +#COMMENT +#BEGIN_LINES LAST_NO_NULLS_KEY_COLUMN_COUNT +#IF LONG_KEY + findOrCreateLongZeroCountKey( + currentKey, + HashCodeUtil.calculateLongHashCode(currentKey), + 0); +#ENDIF LONG_KEY +#IF STRING_KEY + findOrCreateBytesKey( + currentKey, currentKeyStart, currentKeyLength, + HashCodeUtil.calculateBytesHashCode( + currentKey, currentKeyStart, currentKeyLength), + 0); +#ENDIF STRING_KEY +#IF SERIALIZE_KEY + findOrCreateBytesKey( + currentKey, 0, currentKeyLength, + HashCodeUtil.calculateBytesHashCode( + currentKey, 0, currentKeyLength), + 0); +#ENDIF SERIALIZE_KEY +#END_LINES +#COMMENT +#COMMENT ******************************************************************************************* +#COMMENT After the key processing loop for a batch which may have NULL keys, create the hash table +#COMMENT entry if necessary; ignore it if it is already present since the count is 0 in this case. +#COMMENT All variations.
+#COMMENT +#BEGIN_LINES LAST_NULLS_KEY_COLUMN_COUNT + if (currKeyIsNull) { + haveNullKey = true; + } else { +#IF LONG_KEY + findOrCreateLongZeroCountKey( + currentKey, + HashCodeUtil.calculateLongHashCode(currentKey), + 0); +#ENDIF LONG_KEY +#IF STRING_KEY + findOrCreateBytesKey( + currentKey, currentKeyStart, currentKeyLength, + HashCodeUtil.calculateBytesHashCode( + currentKey, currentKeyStart, currentKeyLength), + 0); +#ENDIF STRING_KEY +#IF SERIALIZE_KEY + findOrCreateBytesKey( + currentKey, 0, currentKeyLength, + HashCodeUtil.calculateBytesHashCode( + currentKey, 0, currentKeyLength), + 0); +#ENDIF SERIALIZE_KEY +#END_LINES +#COMMENT=========================================================================================== +#COMMENT + /* + * Repeating key case -- it is either ALL NULL keys or ALL same non-NULL keys. + * + * First, we determine the number of non-NULL values in the non-key column. + * Then, whether ALL NULL keys or ALL same non-NULL keys, we create the key if necessary and + * include the new count. + * + * A NULL key is not in the slot table. It is separately represented by members haveNullKey + * and nullKeyCount. + * + */ + @Override + protected void handleRepeatingKey(VectorizedRowBatch batch, final int inputLogicalSize, + keyColVector) + throws HiveException, IOException { + + /* + * First, determine the count of the non-key column for the whole batch which is covered by the + * repeating key. + */ + ColumnVector nonKeyColVector = batch.cols[countColumnNum]; + int nonKeyNonNullCount; + if (nonKeyColVector.noNulls) { + + // NOTE: This may or may not have nonKeyColVector.isRepeating == true. + // Non-Key: [REPEATING,] NO NULLS + nonKeyNonNullCount = inputLogicalSize; + + } else if (nonKeyColVector.isRepeating) { + + // Non-Key: REPEATING, NULLS Possible + nonKeyNonNullCount = (nonKeyColVector.isNull[0] ? 0 : inputLogicalSize); + + } else { + + // Non-Key: NOT REPEATING, NULLS Possible. + boolean[] nonKeyIsNull = nonKeyColVector.isNull; + nonKeyNonNullCount = 0; + + // Count the non-NULL non-key values over the whole batch, starting at row 0. + if (batch.selectedInUse) { + + int[] selected = batch.selected; + + for (int logicalIndex = 0; logicalIndex < inputLogicalSize; logicalIndex++) { + final int batchIndex = selected[logicalIndex]; + if (!nonKeyIsNull[batchIndex]) { + nonKeyNonNullCount++; + } + } + } else { + for (int batchIndex = 0; batchIndex < inputLogicalSize; batchIndex++) { + if (!nonKeyIsNull[batchIndex]) { + nonKeyNonNullCount++; + } + } + } + } + + /* + * Finally, use the non-key non-NULL count for our repeated non-NULL or NULL keys. + */ + if (keyColVector.noNulls || !keyColVector.isNull[0]) { + + // Non-NULL key.
+#IF LONG_KEY + final long repeatingKey = keyColVector.vector[0]; + findOrCreateLongZeroCountKey( + repeatingKey, + HashCodeUtil.calculateLongHashCode(repeatingKey), + nonKeyNonNullCount); +#ENDIF LONG_KEY +#IF STRING_KEY + final byte[] repeatingKey = keyColVector.vector[0]; + final int repeatingKeyStart = keyColVector.start[0]; + final int repeatingKeyLength = keyColVector.length[0]; + findOrCreateBytesKey( + repeatingKey, repeatingKeyStart, repeatingKeyLength, + HashCodeUtil.calculateBytesHashCode( + repeatingKey, repeatingKeyStart, repeatingKeyLength), + nonKeyNonNullCount); +#ENDIF STRING_KEY +#IF SERIALIZE_KEY + keyVectorSerializeWrite.setOutput(currentKeyOutput); + keyVectorSerializeWrite.serializeWrite(batch, 0); + byte[] repeatingKey = currentKeyOutput.getData(); + int repeatingKeyLength = currentKeyOutput.getLength(); + findOrCreateBytesKey( + repeatingKey, 0, repeatingKeyLength, + HashCodeUtil.calculateBytesHashCode( + repeatingKey, 0, repeatingKeyLength), + nonKeyNonNullCount); +#ENDIF SERIALIZE_KEY + } else { + + // All NULL keys. Since we are counting a non-Key column, we must count it under the NULL + // pseudo-entry. + haveNullKey = true; + nullKeyCount += nonKeyNonNullCount; + + } + } + + /* + * Do the non-key-column {REPEATING|NO REPEATING} NO NULLS case for handleLogicalNoNullsKey. + * + * Look for sequences of equal keys and determine their count. + */ + private void doLogicalNoNullsKeyNoNullsColumn(VectorizedRowBatch batch, + final int inputLogicalSize, keyColVector) + throws HiveException, IOException { + int[] selected = batch.selected; + +#USE_LINES KEY_VECTOR_VARIABLES + +#USE_LINES LOGICAL_NO_NULLS_CURRENT_KEY_VARIABLES + + int count = 1; + + for (int logicalIndex = 1; logicalIndex < inputLogicalSize; logicalIndex++) { + final int batchIndex = selected[logicalIndex]; + +#USE_LINES GET_NEXT_KEY +#USE_LINES IF_NEXT_EQUALS_CURRENT + + count++; + } else { + + // Current key ended. +#USE_LINES CURRENT_COLUMN_COUNT_KEY_ENDED + + // New current key. +#USE_LINES NEW_CURRENT_KEY + + count = 1; + } + } + // Handle last key. +#USE_LINES LAST_NO_NULLS_COLUMN_COUNT_KEY + } + + /* + * Do the non-key-column REPEATING NULLS case for handleLogicalNoNullsKey. + * + * Scan for sequences of equal keys. The column count is simply 0 because of all NULL values -- + * but we still must create an entry in the slot table. + */ + private void doLogicalNoNullsKeyRepeatingNullColumn(VectorizedRowBatch batch, + final int inputLogicalSize, keyColVector) + throws HiveException, IOException { + + int[] selected = batch.selected; + +#USE_LINES KEY_VECTOR_VARIABLES + + // This loop basically does any needed key creation since the non-key count is 0 because + // repeating non-key NULL. + +#USE_LINES LOGICAL_NO_NULLS_CURRENT_KEY_VARIABLES + + for (int logicalIndex = 1; logicalIndex < inputLogicalSize; logicalIndex++) { + final int batchIndex = selected[logicalIndex]; + +#USE_LINES GET_NEXT_KEY +#USE_LINES IF_NEXT_EQUALS_CURRENT + + // No counting. + } else { + + // Current key ended. +#USE_LINES CURRENT_COLUMN_COUNT_KEY_ENDED_ZERO_COUNT + + // New current key. +#USE_LINES NEW_CURRENT_KEY + } + } + // Handle last key. +#USE_LINES LAST_NO_NULLS_KEY_COLUMN_COUNT + } + + /* + * Do the NO REPEATING NULLS case for handleLogicalNoNullsKey. + * + * Look for sequence of equal keys -- look over at the non-key-column and count non-null rows. + * Even when the non-NULL row count is 0, we still must create an entry in the slot table. 
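+ * + * For example, keys [5, 5, 5] with non-key values [NULL, 7, NULL] still produce an entry for + * key 5, with a count of 1.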
+ */ + private void doLogicalNoNullsKeyNullsColumn(VectorizedRowBatch batch, + final int inputLogicalSize, keyColVector, ColumnVector nonKeyColVector) + throws HiveException, IOException { + + int[] selected = batch.selected; + +#USE_LINES KEY_VECTOR_VARIABLES + + boolean[] nonKeyIsNull = nonKeyColVector.isNull; + +#USE_LINES LOGICAL_NO_NULLS_CURRENT_KEY_VARIABLES + + int count = (nonKeyIsNull[firstBatchIndex] ? 0 : 1); + + for (int logicalIndex = 1; logicalIndex < inputLogicalSize; logicalIndex++) { + final int batchIndex = selected[logicalIndex]; + +#USE_LINES GET_NEXT_KEY +#USE_LINES IF_NEXT_EQUALS_CURRENT + + count += (nonKeyIsNull[batchIndex] ? 0 : 1); + } else { + + // Current key ended. +#USE_LINES CURRENT_COLUMN_COUNT_KEY_ENDED + + // New current key. +#USE_LINES NEW_CURRENT_KEY + + count = (nonKeyIsNull[batchIndex] ? 0 : 1); + } + } + // Handle last key. +#USE_LINES LAST_NO_NULLS_COLUMN_COUNT_KEY + } + + /* + * Logical batch processing (i.e. selectedInUse is true since rows were filtered out) for + * NO NULLS key case. + * + * In general, loop over key column and process the keys. Look for sequences of equal keys. And, + * at the same time do any processing for the non-key-column counting. + * + * Here are the cases: + * + * 1) When non-key-column {REPEATING|NO REPEATING} NO NULLS, look for sequences of equal keys + * and determine their count. + * + * 2) When non-key-column REPEATING NULLS, scan for sequences of equal keys. The column count + * is simply 0 because of all NULL values -- but we still must create an entry in the + * slot table. + * + * 3) Otherwise, non-key-column NO REPEATING NULLS, as we are looking for sequence of + * equal keys -- look over at the non-key-column and count non-null rows. Even when the + * non-null row count is 0, we still must create an entry in the slot table. + * + */ + @Override + protected void handleLogicalNoNullsKey(VectorizedRowBatch batch, final int inputLogicalSize, + keyColVector) throws HiveException, IOException { + + ColumnVector nonKeyColVector = batch.cols[countColumnNum]; + + if (nonKeyColVector.noNulls) { + + // NOTE: This may or may not have nonKeyColVector.isRepeating == true. + // Non-Key: {REPEATING|NO REPEATING} NO NULLS + + doLogicalNoNullsKeyNoNullsColumn(batch, inputLogicalSize, keyColVector); + + } else if (nonKeyColVector.isRepeating) { + + // Non-Key: REPEATING, NULLS Possible. + + if (nonKeyColVector.isNull[0]) { + + // NULL repeating non-key column. + doLogicalNoNullsKeyRepeatingNullColumn(batch, inputLogicalSize, keyColVector); + + } else { + + // REPEATING NO NULLS + doLogicalNoNullsKeyNoNullsColumn(batch, inputLogicalSize, keyColVector); + + } + } else { + + // Non-Key: NOT REPEATING, NULLS. + + doLogicalNoNullsKeyNullsColumn(batch, inputLogicalSize, keyColVector, nonKeyColVector); + + } + } + + /* + * Do the non-key-column {REPEATING|NO REPEATING} NO NULLS case for handleLogicalNullsKey. + * + * (For remaining comments see doLogicalNoNullsKeyNoNullsColumn). + */ + private void doLogicalNullsKeyNoNullsColumn(VectorizedRowBatch batch, final int inputLogicalSize, + keyColVector) throws HiveException, IOException { + + int[] selected = batch.selected; + +#USE_LINES KEY_VECTOR_VARIABLES + +#USE_LINES LOGICAL_NULLS_CURRENT_KEY_VARIABLES + + int count = 1; + + for (int logicalIndex = 1; logicalIndex < inputLogicalSize; logicalIndex++) { + final int batchIndex = selected[logicalIndex]; + + if (keyIsNull[batchIndex]) { + + if (currKeyIsNull) { + + count++; + } else { + + // Current non-NULL key ended. 
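The GET_NEXT_KEY / IF_NEXT_EQUALS_CURRENT snippets implement run detection: consecutive equal keys collapse into one (key, count) pair, so the hash table is probed once per run instead of once per row. A self-contained sketch of that loop shape for long keys (names are invented; the Sink stands in for findOrCreate*):

    /** Sketch: collapse runs of equal long keys into (key, count) pairs. */
    public final class KeyRunSketch {
      interface Sink { void add(long key, int count); }

      static void scanRuns(long[] vector, int[] selected, int size, Sink sink) {
        long currentKey = vector[selected[0]];
        int count = 1;
        for (int logical = 1; logical < size; logical++) {
          long nextKey = vector[selected[logical]];
          if (nextKey == currentKey) {
            count++;                       // extend the current run
          } else {
            sink.add(currentKey, count);   // run ended: one table probe covers N rows
            currentKey = nextKey;          // new current key
            count = 1;
          }
        }
        sink.add(currentKey, count);       // handle last key
      }

      public static void main(String[] args) {
        long[] v = { 7, 7, 7, 3, 3, 9 };
        int[] sel = { 0, 1, 2, 3, 4, 5 };
        scanRuns(v, sel, v.length, (k, c) -> System.out.println(k + " x" + c));
      }
    }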
+#USE_LINES CURRENT_COLUMN_COUNT_KEY_ENDED + + // New NULL key. + currKeyIsNull = true; + count = 1; + } + + } else { + +#USE_LINES GET_NEXT_KEY + if (currKeyIsNull) { + + // Current NULL key ended. + currKeyIsNull = false; + + haveNullKey = true; + nullKeyCount += count; + + // New non-NULL key. +#USE_LINES NEW_CURRENT_KEY + + count = 1; +#USE_LINES ELSE_IF_NEXT_EQUALS_CURRENT + + count++; + } else { + + // Current non-NULL key ended. +#USE_LINES CURRENT_COLUMN_COUNT_KEY_ENDED + + // New non-NULL key. +#USE_LINES NEW_CURRENT_KEY + + count = 1; + } + } + } + // Handle last key. +#USE_LINES LAST_NO_NULLS_COLUMN_COUNT_KEY + } + + /* + * Do the non-key-column REPEATING NULLS case for handleLogicalNullsKey. + * + * (For remaining comments see doLogicalNoNullsKeyRepeatingNullColumn). + */ + private void doLogicalNullsKeyRepeatingNullColumn(VectorizedRowBatch batch, final int inputLogicalSize, + keyColVector) throws HiveException, IOException { + + int[] selected = batch.selected; + +#USE_LINES KEY_VECTOR_VARIABLES + + // This loop basically does any needed key creation since the non-key count is 0 because + // repeating non-key NULL. + +#USE_LINES LOGICAL_NULLS_CURRENT_KEY_VARIABLES + + for (int logicalIndex = 1; logicalIndex < inputLogicalSize; logicalIndex++) { + final int batchIndex = selected[logicalIndex]; + + if (keyIsNull[batchIndex]) { + + if (currKeyIsNull) { + + // No counting. + } else { + + // Current non-NULL key ended. +#USE_LINES CURRENT_COLUMN_COUNT_KEY_ENDED_ZERO_COUNT + + // New NULL key. + currKeyIsNull = true; + } + + } else { + +#USE_LINES GET_NEXT_KEY + if (currKeyIsNull) { + + // Current NULL key ended. + currKeyIsNull = false; + + haveNullKey = true; + + // New non-NULL key. +#USE_LINES NEW_CURRENT_KEY +#USE_LINES ELSE_IF_NEXT_EQUALS_CURRENT + + // No counting + } else { + + // Current non-NULL key ended. +#USE_LINES CURRENT_COLUMN_COUNT_KEY_ENDED_ZERO_COUNT + + // New non-NULL key. +#USE_LINES NEW_CURRENT_KEY + } + } + } + // Handle last key. +#USE_LINES LAST_NULLS_KEY_COLUMN_COUNT + } + } + + /* + * Do the non-key-column NO REPEATING NULLS case for handleLogicalNullsKey. + * + * (For remaining comments see doLogicalNoNullsKeyNullsColumn). + */ + private void doLogicalNullsKeyNullsColumn(VectorizedRowBatch batch, + final int inputLogicalSize, keyColVector, ColumnVector nonKeyColVector) + throws HiveException, IOException { + + int[] selected = batch.selected; + +#USE_LINES KEY_VECTOR_VARIABLES + + boolean[] nonKeyIsNull = nonKeyColVector.isNull; + +#USE_LINES LOGICAL_NULLS_CURRENT_KEY_VARIABLES + + int count = (nonKeyIsNull[firstBatchIndex] ? 0 : 1); + + for (int logicalIndex = 1; logicalIndex < inputLogicalSize; logicalIndex++) { + final int batchIndex = selected[logicalIndex]; + + if (keyIsNull[batchIndex]) { + + if (currKeyIsNull) { + + count += (nonKeyIsNull[batchIndex] ? 0 : 1); + } else { + + // Current non-NULL key ended. +#USE_LINES CURRENT_COLUMN_COUNT_KEY_ENDED + + // New NULL key. + currKeyIsNull = true; + count = (nonKeyIsNull[batchIndex] ? 0 : 1); + } + + } else { + +#USE_LINES GET_NEXT_KEY + if (currKeyIsNull) { + + // Current NULL key ended. + currKeyIsNull = false; + + haveNullKey = true; + nullKeyCount += count; + + // New non-NULL key. +#USE_LINES NEW_CURRENT_KEY + + count = (nonKeyIsNull[batchIndex] ? 0 : 1); +#USE_LINES ELSE_IF_NEXT_EQUALS_CURRENT + + count += (nonKeyIsNull[batchIndex] ? 0 : 1); + } else { + + // Current non-NULL key ended. +#USE_LINES CURRENT_COLUMN_COUNT_KEY_ENDED + + // New non-NULL key. 
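Because the slot table has no representation for NULL, the NULL key lives entirely in the two side fields haveNullKey and nullKeyCount. A sketch of that idea, with an ordinary map standing in for the slot table (illustrative only, not the patch's data structure):

    import java.util.HashMap;
    import java.util.Map;

    /** Sketch: the NULL key never enters the table; it lives in two side fields. */
    public final class NullKeySideChannel {
      private final Map<Long, Long> table = new HashMap<>();
      private boolean haveNullKey;
      private long nullKeyCount;

      void addCount(Long key, long count) {
        if (key == null) {
          haveNullKey = true;       // remember we saw NULL even when count stays 0
          nullKeyCount += count;
        } else {
          table.merge(key, count, Long::sum);
        }
      }

      public static void main(String[] args) {
        NullKeySideChannel agg = new NullKeySideChannel();
        agg.addCount(7L, 4);
        agg.addCount(null, 2);
        agg.addCount(7L, 1);
        System.out.println(agg.table + " nullKey=" + agg.haveNullKey + "/" + agg.nullKeyCount);
      }
    }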
+#USE_LINES NEW_CURRENT_KEY + + count = (nonKeyIsNull[batchIndex] ? 0 : 1); + } + } + } + // Handle last key. +#USE_LINES LAST_NULLS_COLUMN_COUNT_KEY + } + + /* + * Logical batch processing (i.e. selectedInUse is true since rows were filtered out) for + * NULLS key case. + * + * Both NULL and non-NULL keys will have counts for non-key-columns. + * + * In general, loop over key column and process the keys. Look for sequences of NULL keys or + * equal keys. And, at the same time do any processing for the non-key-column counting. + * + * (See the non-key column case comments for handleLogicalNoNullsKey). + * + * In all cases above, when its a NULL key, do NULL entry processing. + * + */ + @Override + protected void handleLogicalNullsKey(VectorizedRowBatch batch, final int inputLogicalSize, + keyColVector) throws HiveException, IOException { + + ColumnVector nonKeyColVector = batch.cols[countColumnNum]; + + if (nonKeyColVector.noNulls) { + + // NOTE: This may or may not have nonKeyColVector.isRepeating == true. + // Non-Key: {REPEATING|NO REPEATING} NO NULLS + + doLogicalNullsKeyNoNullsColumn(batch, inputLogicalSize, keyColVector); + + } else if (nonKeyColVector.isRepeating) { + + // Non-Key: REPEATING, NULLS Possible. + + if (nonKeyColVector.isNull[0]) { + + // NULL repeating non-key column. + doLogicalNullsKeyRepeatingNullColumn(batch, inputLogicalSize, keyColVector); + + } else { + + // Non-NULL repeating non-key column. + doLogicalNullsKeyNoNullsColumn(batch, inputLogicalSize, keyColVector); + + } + } else { + + // Non-Key: NOT REPEATING, NULLS Possible. + + doLogicalNullsKeyNullsColumn(batch, inputLogicalSize, keyColVector, nonKeyColVector); + + } + } + + //=============================================================================================== + //=============================================================================================== + + /* + * Do the non-key-column {REPEATING|NO REPEATING} NO NULLS case for handlePhysicalNoNullsKey. + * + * (For remaining comments see doLogicalNoNullsKeyNoNullsColumn). + */ + private void doPhysicalNoNullsKeyNoNullsColumn(VectorizedRowBatch batch, + final int inputLogicalSize, keyColVector) + throws HiveException, IOException { + int[] selected = batch.selected; + +#USE_LINES KEY_VECTOR_VARIABLES + +#USE_LINES PHYSICAL_NO_NULLS_CURRENT_KEY_VARIABLES + + int count = 1; + + for (int batchIndex = 1; batchIndex < inputLogicalSize; batchIndex++) { + +#USE_LINES GET_NEXT_KEY +#USE_LINES IF_NEXT_EQUALS_CURRENT + + count++; + } else { + + // Current key ended. +#USE_LINES CURRENT_COLUMN_COUNT_KEY_ENDED + + // New current key. +#USE_LINES NEW_CURRENT_KEY + + count = 1; + } + } + // Handle last key. +#USE_LINES LAST_NO_NULLS_COLUMN_COUNT_KEY + } + + /* + * Do the non-key-column REPEATING NULLS case for handleLogicalNoNullsKey. + * + * (For remaining comments see doLogicalNoNullsKeyRepeatingNullColumn). + */ + private void doPhysicalNoNullsKeyRepeatingNullColumn(VectorizedRowBatch batch, + final int inputLogicalSize, keyColVector) + throws HiveException, IOException { + +#USE_LINES KEY_VECTOR_VARIABLES + + // This loop basically does any needed key creation since the non-key count is 0 because + // repeating non-key NULL. + +#USE_LINES PHYSICAL_NO_NULLS_CURRENT_KEY_VARIABLES + + for (int batchIndex = 1; batchIndex < inputLogicalSize; batchIndex++) { + +#USE_LINES GET_NEXT_KEY +#USE_LINES IF_NEXT_EQUALS_CURRENT + + // No counting. + } else { + + // Current key ended. +#USE_LINES CURRENT_COLUMN_COUNT_KEY_ENDED_ZERO_COUNT + + // New current key. 
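Every handle*Key variant dispatches on the same three-way classification of the non-key column. The decision collapses to a small classifier (enum and names are invented for illustration):

    /** Sketch of the three-way dispatch used by handle*Key for the non-key column. */
    public final class ColumnCaseDispatch {
      enum ColumnCase { NO_NULLS, REPEATING_NULL, MAY_HAVE_NULLS }

      static ColumnCase classify(boolean noNulls, boolean isRepeating, boolean[] isNull) {
        if (noNulls) {
          return ColumnCase.NO_NULLS;        // repeating or not, no NULL handling needed
        }
        if (isRepeating) {
          // One flag decides for the entire batch.
          return isNull[0] ? ColumnCase.REPEATING_NULL : ColumnCase.NO_NULLS;
        }
        return ColumnCase.MAY_HAVE_NULLS;    // must consult isNull[] per row
      }

      public static void main(String[] args) {
        System.out.println(classify(false, true, new boolean[] { true }));    // REPEATING_NULL
        System.out.println(classify(false, false, new boolean[] { false })); // MAY_HAVE_NULLS
      }
    }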
+#USE_LINES NEW_CURRENT_KEY + } + } + // Handle last key. +#USE_LINES LAST_NO_NULLS_KEY_COLUMN_COUNT + } + + /* + * Do the NO REPEATING NULLS case for handleLogicalNoNullsKey. + * + * (For remaining comments see doLogicalNoNullsKeyNullsColumn). + */ + private void doPhysicalNoNullsKeyNullsColumn(VectorizedRowBatch batch, + final int inputLogicalSize, keyColVector, ColumnVector nonKeyColVector) + throws HiveException, IOException { + +#USE_LINES KEY_VECTOR_VARIABLES + + boolean[] nonKeyIsNull = nonKeyColVector.isNull; + +#USE_LINES PHYSICAL_NO_NULLS_CURRENT_KEY_VARIABLES + + int count = (nonKeyIsNull[0] ? 0 : 1); + + for (int batchIndex = 1; batchIndex < inputLogicalSize; batchIndex++) { + +#USE_LINES GET_NEXT_KEY +#USE_LINES IF_NEXT_EQUALS_CURRENT + + count += (nonKeyIsNull[batchIndex] ? 0 : 1); + } else { + + // Current key ended. +#USE_LINES CURRENT_COLUMN_COUNT_KEY_ENDED + + // New current key. +#USE_LINES NEW_CURRENT_KEY + + count = (nonKeyIsNull[batchIndex] ? 0 : 1); + } + } + // Handle last key. +#USE_LINES LAST_NO_NULLS_COLUMN_COUNT_KEY + } + + /* + * Physical batch processing (i.e. selectedInUse is false since NO rows were filtered out) for + * NO NULLS key case. + * + * (For remaining comments see handleLogicalNoNullsKey). + */ + @Override + protected void handlePhysicalNoNullsKey(VectorizedRowBatch batch, final int inputLogicalSize, + keyColVector) throws HiveException, IOException { + + ColumnVector nonKeyColVector = batch.cols[countColumnNum]; + + if (nonKeyColVector.noNulls) { + + // NOTE: This may or may not have nonKeyColVector.isRepeating == true. + // Non-Key: {REPEATING|NO REPEATING} NO NULLS + + doPhysicalNoNullsKeyNoNullsColumn(batch, inputLogicalSize, keyColVector); + + } else if (nonKeyColVector.isRepeating) { + + // Non-Key: REPEATING, NULLS Possible. + + if (nonKeyColVector.isNull[0]) { + + // NULL repeating non-key column. + doPhysicalNoNullsKeyRepeatingNullColumn(batch, inputLogicalSize, keyColVector); + + } else { + + // REPEATING NO NULLS + doPhysicalNoNullsKeyNoNullsColumn(batch, inputLogicalSize, keyColVector); + + } + } else { + + // Non-Key: NOT REPEATING, NULLS. + + doPhysicalNoNullsKeyNullsColumn(batch, inputLogicalSize, keyColVector, nonKeyColVector); + + } + } + + private void doPhysicalNullsKeyNoNullsColumn(VectorizedRowBatch batch, final int inputLogicalSize, + keyColVector) throws HiveException, IOException { + +#USE_LINES KEY_VECTOR_VARIABLES + +#USE_LINES PHYSICAL_NULLS_CURRENT_KEY_VARIABLES + + int count = 1; + + for (int batchIndex = 1; batchIndex < inputLogicalSize; batchIndex++) { + + if (keyIsNull[batchIndex]) { + + if (currKeyIsNull) { + + count++; + } else { + + // Current non-NULL key ended. +#USE_LINES CURRENT_COLUMN_COUNT_KEY_ENDED + + // New NULL key. + currKeyIsNull = true; + count = 1; + } + + } else { + +#USE_LINES GET_NEXT_KEY + if (currKeyIsNull) { + + // Current NULL key ended. + currKeyIsNull = false; + + haveNullKey = true; + nullKeyCount += count; + + // New non-NULL key. +#USE_LINES NEW_CURRENT_KEY + + count = 1; +#USE_LINES ELSE_IF_NEXT_EQUALS_CURRENT + + count++; + } else { + + // Current non-NULL key ended. +#USE_LINES CURRENT_COLUMN_COUNT_KEY_ENDED + + // New non-NULL key. +#USE_LINES NEW_CURRENT_KEY + + count = 1; + } + } + } + // Handle last key. 
+#USE_LINES LAST_NO_NULLS_COLUMN_COUNT_KEY + } + + private void doPhysicalNullsKeyRepeatingNullColumn(VectorizedRowBatch batch, final int inputLogicalSize, + keyColVector) throws HiveException, IOException { + +#USE_LINES KEY_VECTOR_VARIABLES + + // This loop basically does any needed key creation since the non-key count is 0 because + // repeating non-key NULL. + +#USE_LINES PHYSICAL_NULLS_CURRENT_KEY_VARIABLES + + for (int batchIndex = 1; batchIndex < inputLogicalSize; batchIndex++) { + + if (keyIsNull[batchIndex]) { + + if (currKeyIsNull) { + + // No counting. + } else { + + // Current non-NULL key ended. +#USE_LINES CURRENT_COLUMN_COUNT_KEY_ENDED_ZERO_COUNT + + // New NULL key. + currKeyIsNull = true; + } + + } else { + +#USE_LINES GET_NEXT_KEY + if (currKeyIsNull) { + + // Current NULL key ended. + currKeyIsNull = false; + + haveNullKey = true; + + // New non-NULL key. +#USE_LINES NEW_CURRENT_KEY +#USE_LINES ELSE_IF_NEXT_EQUALS_CURRENT + + // No counting + } else { + + // Current non-NULL key ended. +#USE_LINES CURRENT_COLUMN_COUNT_KEY_ENDED_ZERO_COUNT + + // New non-NULL key. +#USE_LINES NEW_CURRENT_KEY + } + } + } + // Handle last key. +#USE_LINES LAST_NULLS_KEY_COLUMN_COUNT + } + } + + private void doPhysicalNullsKeyNullsColumn(VectorizedRowBatch batch, + final int inputLogicalSize, keyColVector, ColumnVector nonKeyColVector) + throws HiveException, IOException { + +#USE_LINES KEY_VECTOR_VARIABLES + + boolean[] nonKeyIsNull = nonKeyColVector.isNull; + +#USE_LINES PHYSICAL_NULLS_CURRENT_KEY_VARIABLES + + int count = (nonKeyIsNull[0] ? 0 : 1); + + for (int batchIndex = 1; batchIndex < inputLogicalSize; batchIndex++) { + + if (keyIsNull[batchIndex]) { + + if (currKeyIsNull) { + + count += (nonKeyIsNull[batchIndex] ? 0 : 1); + } else { + + // Current non-NULL key ended. +#USE_LINES CURRENT_COLUMN_COUNT_KEY_ENDED + + // New NULL key. + currKeyIsNull = true; + count = (nonKeyIsNull[batchIndex] ? 0 : 1); + } + + } else { + +#USE_LINES GET_NEXT_KEY + if (currKeyIsNull) { + + // Current NULL key ended. + currKeyIsNull = false; + + haveNullKey = true; + nullKeyCount += count; + + // New non-NULL key. +#USE_LINES NEW_CURRENT_KEY + + count = (nonKeyIsNull[batchIndex] ? 0 : 1); +#USE_LINES ELSE_IF_NEXT_EQUALS_CURRENT + + count += (nonKeyIsNull[batchIndex] ? 0 : 1); + } else { + + // Current non-NULL key ended. +#USE_LINES CURRENT_COLUMN_COUNT_KEY_ENDED + + // New non-NULL key. +#USE_LINES NEW_CURRENT_KEY + + count = (nonKeyIsNull[batchIndex] ? 0 : 1); + } + } + } + // Handle last key. +#USE_LINES LAST_NULLS_COLUMN_COUNT_KEY + } + + /* + * Physical batch processing (i.e. selectedInUse is false since NO rows were filtered out) for + * NULLS key case. + * + * (For remaining comments for handleLogicalNullsKey). + */ + @Override + protected void handlePhysicalNullsKey(VectorizedRowBatch batch, final int inputLogicalSize, + keyColVector) throws HiveException, IOException { + + ColumnVector nonKeyColVector = batch.cols[countColumnNum]; + + if (nonKeyColVector.noNulls) { + + // NOTE: This may or may not have nonKeyColVector.isRepeating == true. + // Non-Key: {REPEATING|NO REPEATING} NO NULLS + + doPhysicalNullsKeyNoNullsColumn(batch, inputLogicalSize, keyColVector); + + } else if (nonKeyColVector.isRepeating) { + + // Non-Key: REPEATING, NULLS Possible. + + if (nonKeyColVector.isNull[0]) { + + // NULL repeating non-key column. + doPhysicalNullsKeyRepeatingNullColumn(batch, inputLogicalSize, keyColVector); + + } else { + + // Non-NULL repeating non-key column. 
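The Logical/Physical method pairs differ only in iteration: logical batches dereference batch.selected because rows were filtered out, while physical batches index the vectors directly. A minimal sketch of the two loop shapes (summing stands in for the counting work; names are invented):

    /** Sketch: "logical" batches go through selected[]; "physical" batches index directly. */
    public final class BatchIteration {
      static long sumLogical(long[] vector, int[] selected, int size) {
        long sum = 0;
        for (int logical = 0; logical < size; logical++) {
          sum += vector[selected[logical]];   // rows survived a filter; indirection required
        }
        return sum;
      }

      static long sumPhysical(long[] vector, int size) {
        long sum = 0;
        for (int batchIndex = 0; batchIndex < size; batchIndex++) {
          sum += vector[batchIndex];          // nothing filtered; direct indexing is cheaper
        }
        return sum;
      }

      public static void main(String[] args) {
        long[] v = { 5, 6, 7, 8 };
        System.out.println(sumLogical(v, new int[] { 0, 2 }, 2)); // 12
        System.out.println(sumPhysical(v, 4));                    // 26
      }
    }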
+ doPhysicalNullsKeyNoNullsColumn(batch, inputLogicalSize, keyColVector); + + } + } else { + + // Non-Key: NOT REPEATING, NULLS Possible. + + doPhysicalNullsKeyNullsColumn(batch, inputLogicalSize, keyColVector, nonKeyColVector); + + } + } + + @Override + protected void outputSingleKeyAndCountPairs( + ColumnVector keyColumnVector, + LongColumnVector countColumnVector) throws HiveException { + +#IF LONG_KEY + outputLongZeroCountKeyAndCountPairs( + (LongColumnVector) keyColumnVector, countColumnVector); +#ENDIF LONG_KEY +#IF STRING_KEY + doOutputStringKeyAndCountPairs( + (BytesColumnVector) keyColumnVector, countColumnVector); +#ENDIF STRING_KEY +#IF SERIALIZE_KEY + doOutputSerializeKeyAndCountPairs( + keyColumnVector, countColumnVector); +#ENDIF SERIALIZE_KEY + } +} \ No newline at end of file diff --git ql/src/gen/vectorization/GroupByOperatorTemplates/GroupByHashSingleKeySingleCountKeyOperator.txt ql/src/gen/vectorization/GroupByOperatorTemplates/GroupByHashSingleKeySingleCountKeyOperator.txt new file mode 100644 index 0000000..48a15b3 --- /dev/null +++ ql/src/gen/vectorization/GroupByOperatorTemplates/GroupByHashSingleKeySingleCountKeyOperator.txt @@ -0,0 +1,512 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.ql.exec.vector.groupby.operator.gen; + +import java.io.IOException; +import java.util.ArrayList; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.ql.CompilationOpContext; +import org.apache.hadoop.hive.ql.exec.vector.ColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.; +import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext; +import org.apache.hadoop.hive.ql.exec.vector.VectorizationOperator; +import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; +import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr; +import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; +#IF SERIALIZE_KEY +import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; +import org.apache.hadoop.hive.ql.exec.vector.VectorDeserializeRow; +import org.apache.hadoop.hive.ql.exec.vector.VectorSerializeRow; +import org.apache.hadoop.hive.serde2.ByteStream.Output; +import org.apache.hadoop.hive.serde2.binarysortable.fast.BinarySortableSerializeWrite; +#ENDIF SERIALIZE_KEY +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.plan.OperatorDesc; +import org.apache.hadoop.hive.ql.plan.VectorDesc; +import org.apache.hadoop.hive.ql.plan.VectorGroupByDesc; +import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo; +import org.apache.hive.common.util.HashCodeUtil; + +/* + * Specialized class for doing a COUNT(key-column) Native Vectorized GroupBy. That is, + * the grouping is being done on one long key and we are counting it. + * + * The NULL key is not represented in the hash table. We handle them as a special case. So, + * the find/create call for non-NULL keys looks like this: + + findOrCreateLongNonZeroCountKey( + currentKey, + HashCodeUtil.calculateLongHashCode(currentKey), + count); + + */ +public class + extends VectorGroupByHashKeySingleCountOperatorBase { + + private static final long serialVersionUID = 1L; + + // Non-transient members initialized by the constructor. They cannot be final due to Kryo. + + // The above members are initialized by the constructor and must not be + // transient. + //--------------------------------------------------------------------------- + +#IF SERIALIZE_KEY + // Object that can take a single column in row in a vectorized row batch and serialized it. + // The key is not NULL. + private transient VectorSerializeRow keyVectorSerializeWrite; + + // The BinarySortable serialization of the current key. + private transient Output currentKeyOutput; + + // The BinarySortable serialization of the next key for a possible series of equal keys. + private transient Output nextKeyOutput; + +#ENDIF SERIALIZE_KEY + //--------------------------------------------------------------------------- + // Pass-thru constructors. 
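For key types with no dedicated hash-table layout, the SERIALIZE_KEY path reduces the key to a byte array (BinarySortable in the patch) so one bytes-keyed table serves every key type. A simplified sketch of why that works -- equal keys must serialize to equal bytes; ByteBuffer here is only a stand-in for BinarySortableSerializeWrite:

    import java.nio.ByteBuffer;
    import java.util.Arrays;

    /** Sketch: reduce any key to bytes so a single bytes-keyed table serves all types. */
    public final class SerializedKeySketch {
      static byte[] timestampKey(long epochSeconds, int nanos) {
        return ByteBuffer.allocate(12).putLong(epochSeconds).putInt(nanos).array();
      }

      public static void main(String[] args) {
        byte[] a = timestampKey(1_700_000_000L, 628_309);
        byte[] b = timestampKey(1_700_000_000L, 628_309);
        // Equal keys serialize to equal bytes, so byte-wise hash/compare is enough.
        System.out.println(Arrays.equals(a, b) + " hash=" + Arrays.hashCode(a));
      }
    }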
+ // + + public () { + super(); + } + + public (CompilationOpContext ctx, OperatorDesc conf, + VectorizationContext vContext, VectorDesc vectorDesc) throws HiveException { + super(ctx, conf, vContext, vectorDesc); + } + + @Override + protected void initializeOp(Configuration hconf) throws HiveException { + super.initializeOp(hconf); + +#IF LONG_KEY + allocateBucketArray(LONG_NON_ZERO_COUNT_ENTRY_SIZE); +#ELSE + allocateBucketArray(BYTES_ENTRY_SIZE); +#ENDIF LONG_KEY +#IF SERIALIZE_KEY + + keyVectorSerializeWrite = + new VectorSerializeRow( + new BinarySortableSerializeWrite(1)); + TypeInfo[] typeInfos = new TypeInfo[] { groupByKeyExpressions[0].getOutputTypeInfo() }; + int[] columnMap = new int[] { groupByKeyExpressions[0].getOutputColumnNum() }; + keyVectorSerializeWrite.init(typeInfos, columnMap); + + currentKeyOutput = new Output(); + nextKeyOutput = new Output(); +#ENDIF SERIALIZE_KEY + } + +#COMMENT=========================================================================================== +#COMMENT +#COMMENT These code line snippets are intended to: +#COMMENT 1) Reduce code duplication +#COMMENT 2) To not incur the cost of calling methods or having abstract objects +#COMMENT 3) Or, to have to to parameterize for methods. +#COMMENT 4) Separate the the key variation variables and logic from the common loop logic. +#COMMENT +#INCLUDE GroupByHashSingleKeyCommonLines +#COMMENT +#COMMENT +#COMMENT ******************************************************************************************* +#COMMENT The current series of equal keys ended -- find or create the hash table entry and +#COMMENT add or initialize it with the count. All variations. +#COMMENT +#BEGIN_LINES CURRENT_COUNT_KEY_ENDED +#IF LONG_KEY + findOrCreateLongNonZeroCountKey( + currentKey, + HashCodeUtil.calculateLongHashCode(currentKey), + count); +#ENDIF LONG_KEY +#IF STRING_KEY + findOrCreateBytesKey( + currentKey, currentKeyStart, currentKeyLength, + HashCodeUtil.calculateBytesHashCode( + currentKey, currentKeyStart, currentKeyLength), + count); +#ENDIF STRING_KEY +#IF SERIALIZE_KEY + findOrCreateBytesKey( + currentKey, 0, currentKeyLength, + HashCodeUtil.calculateBytesHashCode( + currentKey, 0, currentKeyLength), + count); +#ENDIF SERIALIZE_KEY +#END_LINES +#COMMENT +#COMMENT ******************************************************************************************* +#COMMENT After the key processing loop for a batch of no NULL keys, find or create the hash table +#COMMENT entry and add or initialize it with the count. All variations. +#COMMENT +#BEGIN_LINES LAST_NO_NULLS_COUNT_KEY +#IF LONG_KEY + findOrCreateLongNonZeroCountKey( + currentKey, + HashCodeUtil.calculateLongHashCode(currentKey), + count); +#ENDIF LONG_KEY +#IF STRING_KEY + findOrCreateBytesKey( + currentKey, currentKeyStart, currentKeyLength, + HashCodeUtil.calculateBytesHashCode( + currentKey, currentKeyStart, currentKeyLength), + count); +#ENDIF STRING_KEY +#IF SERIALIZE_KEY + findOrCreateBytesKey( + currentKey, 0, currentKeyLength, + HashCodeUtil.calculateBytesHashCode( + currentKey, 0, currentKeyLength), + count); +#ENDIF SERIALIZE_KEY +#END_LINES +#COMMENT +#COMMENT ******************************************************************************************* +#COMMENT After the key processing loop for a batch which may have NULL keys, find or create the +#COMMENT hash table entry and add or initialize it with the count. All variations. 
+#COMMENT +#BEGIN_LINES LAST_NULLS_COUNT_KEY + if (!currKeyIsNull) { +#IF LONG_KEY + findOrCreateLongNonZeroCountKey( + currentKey, + HashCodeUtil.calculateLongHashCode(currentKey), + count); +#ENDIF LONG_KEY +#IF STRING_KEY + findOrCreateBytesKey( + currentKey, currentKeyStart, currentKeyLength, + HashCodeUtil.calculateBytesHashCode( + currentKey, currentKeyStart, currentKeyLength), + count); +#ENDIF STRING_KEY +#IF SERIALIZE_KEY + findOrCreateBytesKey( + currentKey, 0, currentKeyLength, + HashCodeUtil.calculateBytesHashCode( + currentKey, 0, currentKeyLength), + count); +#ENDIF SERIALIZE_KEY + } +#END_LINES +#COMMENT=========================================================================================== +#COMMENT + /* + * Repeating key case -- either all NULL keys or all same non-NULL key. + * + * For all NULL keys case we note NULL key exists but leave its count as 0. + */ + @Override + protected void handleRepeatingKey(VectorizedRowBatch batch, final int inputLogicalSize, + keyColVector) throws HiveException, IOException { + + if (keyColVector.noNulls || !keyColVector.isNull[0]) { +#IF LONG_KEY + final long repeatingKey = keyColVector.vector[0]; + findOrCreateLongNonZeroCountKey( + repeatingKey, + HashCodeUtil.calculateLongHashCode(repeatingKey), + inputLogicalSize); +#ENDIF LONG_KEY +#IF STRING_KEY + final byte[] repeatingKey = keyColVector.vector[0]; + final int repeatingKeyStart = keyColVector.start[0]; + final int repeatingKeyLength = keyColVector.length[0]; + findOrCreateBytesKey( + repeatingKey, repeatingKeyStart, repeatingKeyLength, + HashCodeUtil.calculateBytesHashCode( + repeatingKey, repeatingKeyStart, repeatingKeyLength), + inputLogicalSize); +#ENDIF STRING_KEY +#IF SERIALIZE_KEY + keyVectorSerializeWrite.setOutput(currentKeyOutput); + keyVectorSerializeWrite.serializeWrite(batch, 0); + byte[] repeatingKey = currentKeyOutput.getData(); + int repeatingKeyLength = currentKeyOutput.getLength(); + findOrCreateBytesKey( + repeatingKey, 0, repeatingKeyLength, + HashCodeUtil.calculateBytesHashCode( + repeatingKey, 0, repeatingKeyLength), + inputLogicalSize); +#ENDIF SERIALIZE_KEY + } else { + + // We note we encountered a repeating NULL key. But there will be no count for it -- + // just NULL. + haveNullKey = true; + } + } + + /* + * Logical batch processing (i.e. selectedInUse is true since rows were filtered out) for + * NO NULLS key case. + * + * Do find/create on each key with count count. + */ + @Override + protected void handleLogicalNoNullsKey(VectorizedRowBatch batch, final int inputLogicalSize, + keyColVector) throws HiveException, IOException { + + int[] selected = batch.selected; + +#USE_LINES KEY_VECTOR_VARIABLES + +#USE_LINES LOGICAL_NO_NULLS_CURRENT_KEY_VARIABLES + + int count = 1; + + for (int logicalIndex = 1; logicalIndex < inputLogicalSize; logicalIndex++) { + final int batchIndex = selected[logicalIndex]; + +#USE_LINES GET_NEXT_KEY +#USE_LINES IF_NEXT_EQUALS_CURRENT + + count++; + } else { + + // Current key ended. +#USE_LINES CURRENT_COUNT_KEY_ENDED + + // New current key. +#USE_LINES NEW_CURRENT_KEY + + count = 1; + } + } + // Handle last key. +#USE_LINES LAST_NO_NULLS_COUNT_KEY + } + + /* + * Logical batch processing (i.e. selectedInUse is true since rows were filtered out) for + * NULLS key case. + * + * For all NULL keys cases we note NULL key exists but leave its count as 0. + * + * Do find/create on each non-NULL key with count count. 
+ */ + @Override + protected void handleLogicalNullsKey(VectorizedRowBatch batch, final int inputLogicalSize, + keyColVector) throws HiveException, IOException { + + int[] selected = batch.selected; + +#USE_LINES KEY_VECTOR_VARIABLES + +#USE_LINES LOGICAL_NULLS_CURRENT_KEY_VARIABLES + + int count; + if (currKeyIsNull) { + count = 0; + + // We note we encountered a NULL key. But there will be no count for it -- just NULL. + haveNullKey = true; + } else { + count = 1; + } + + for (int logicalIndex = 1; logicalIndex < inputLogicalSize; logicalIndex++) { + final int batchIndex = selected[logicalIndex]; + + if (keyIsNull[batchIndex]) { + + if (currKeyIsNull) { + + // We don't count NULLs for NULL key. + } else { + + // Current non-NULL key ended. +#USE_LINES CURRENT_COUNT_KEY_ENDED +2 + + // New NULL key. + currKeyIsNull = true; + count = 0; + + // We note we encountered a NULL key. But there will be no count for it -- just NULL. + haveNullKey = true; + } + + } else { + +#USE_LINES GET_NEXT_KEY +2 + if (currKeyIsNull) { + + // Current NULL key ended. We don't count NULLs for NULL key. + currKeyIsNull = false; + + // New non-NULL key. +#USE_LINES NEW_CURRENT_KEY +2 + + count = 1; +#USE_LINES ELSE_IF_NEXT_EQUALS_CURRENT +2 + + count++; + } else { + + // Current non-NULL key ended. +#USE_LINES CURRENT_COUNT_KEY_ENDED +2 + + // New non-NULL key. +#USE_LINES NEW_CURRENT_KEY +2 + + count = 1; + } + } + } + // Handle last key. +#USE_LINES LAST_NULLS_COUNT_KEY + } + + /* + * Physical batch processing (i.e. selectedInUse is false since NO rows were filtered out) for + * NO NULLS key case. + * + * (For remaining comments for handleLogicalNoNullsKey). + */ + @Override + protected void handlePhysicalNoNullsKey(VectorizedRowBatch batch, final int inputLogicalSize, + keyColVector) throws HiveException, IOException { + +#USE_LINES KEY_VECTOR_VARIABLES + +#USE_LINES PHYSICAL_NO_NULLS_CURRENT_KEY_VARIABLES + + int count = 1; + + for (int batchIndex = 1; batchIndex < inputLogicalSize; batchIndex++) { + +#USE_LINES GET_NEXT_KEY +#USE_LINES IF_NEXT_EQUALS_CURRENT + + count++; + } else { + + // Current key ended. +#USE_LINES CURRENT_COUNT_KEY_ENDED + + // New current key. +#USE_LINES NEW_CURRENT_KEY + + count = 1; + } + } + // Handle last key. +#USE_LINES LAST_NO_NULLS_COUNT_KEY + } + + /* + * Physical batch processing (i.e. selectedInUse is false since NO rows were filtered out) for + * NULLS key case. + * + * (For remaining comments for handleLogicalNullsKey). + * + */ + @Override + protected void handlePhysicalNullsKey(VectorizedRowBatch batch, final int inputLogicalSize, + keyColVector) throws HiveException, IOException { + +#USE_LINES KEY_VECTOR_VARIABLES + +#USE_LINES PHYSICAL_NULLS_CURRENT_KEY_VARIABLES + + int count; + if (currKeyIsNull) { + count = 0; + + // We note we encountered a NULL key. But there will be no count for it -- just NULL. + haveNullKey = true; + } else { + count = 1; + } + + for (int batchIndex = 1; batchIndex < inputLogicalSize; batchIndex++) { + + if (keyIsNull[batchIndex]) { + + if (currKeyIsNull) { + + // We don't count NULLs for NULL key. + } else { + + // Current non-NULL key ended. +#USE_LINES CURRENT_COUNT_KEY_ENDED +2 + + // New NULL key. + currKeyIsNull = true; + count = 0; + + // We note we encountered a NULL key. But there will be no count for it -- just NULL. + haveNullKey = true; + } + + } else { + +#USE_LINES GET_NEXT_KEY +2 + if (currKeyIsNull) { + + // Current NULL key ended. + currKeyIsNull = false; + + // New non-NULL key. 
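A worked example of the COUNT(key) NULL semantics described above: the NULL group row is still emitted, but its count stays 0 because COUNT over a column never counts NULLs (data values below are made up):

    import java.util.LinkedHashMap;
    import java.util.Map;

    /** COUNT(key) GROUP BY key: the NULL group exists but its count is 0. */
    public final class CountKeySemantics {
      public static void main(String[] args) {
        Long[] keys = { 7L, null, 7L, -3L, null, 7L };
        Map<Long, Long> counts = new LinkedHashMap<>();
        boolean haveNullKey = false;
        for (Long k : keys) {
          if (k == null) {
            haveNullKey = true;          // the group exists, but NULL is never counted
          } else {
            counts.merge(k, 1L, Long::sum);
          }
        }
        System.out.println(counts);      // {7=3, -3=1}
        if (haveNullKey) {
          System.out.println("NULL 0");  // emitted row: key NULL, COUNT(key) == 0
        }
      }
    }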
+#USE_LINES NEW_CURRENT_KEY +2 + + count = 1; +#USE_LINES ELSE_IF_NEXT_EQUALS_CURRENT +2 + + count++; + } else { + + // Current non-NULL key ended. +#USE_LINES CURRENT_COUNT_KEY_ENDED +2 + + // New non-NULL key. +#USE_LINES NEW_CURRENT_KEY +2 + + count = 1; + } + } + } + // Handle last key. +#USE_LINES LAST_NULLS_COUNT_KEY + } + + @Override + protected void outputSingleKeyAndCountPairs( + ColumnVector keyColumnVector, + LongColumnVector countColumnVector) throws HiveException { + +#IF LONG_KEY + outputLongNonZeroKeyAndCountPairs( + (LongColumnVector) keyColumnVector, countColumnVector); +#ENDIF LONG_KEY +#IF STRING_KEY + doOutputStringKeyAndCountPairs( + (BytesColumnVector) keyColumnVector, countColumnVector); +#ENDIF STRING_KEY +#IF SERIALIZE_KEY + doOutputSerializeKeyAndCountPairs( + keyColumnVector, countColumnVector); +#ENDIF SERIALIZE_KEY + } +} \ No newline at end of file diff --git ql/src/gen/vectorization/GroupByOperatorTemplates/GroupByHashSingleKeySingleCountStarOperator.txt ql/src/gen/vectorization/GroupByOperatorTemplates/GroupByHashSingleKeySingleCountStarOperator.txt new file mode 100644 index 0000000..12a544e --- /dev/null +++ ql/src/gen/vectorization/GroupByOperatorTemplates/GroupByHashSingleKeySingleCountStarOperator.txt @@ -0,0 +1,496 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.ql.exec.vector.groupby.operator.gen; + +import java.io.IOException; +import java.util.ArrayList; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.ql.CompilationOpContext; +import org.apache.hadoop.hive.ql.exec.vector.ColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.; +import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext; +import org.apache.hadoop.hive.ql.exec.vector.VectorizationOperator; +import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; +import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr; +import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; +#IF SERIALIZE_KEY +import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; +import org.apache.hadoop.hive.ql.exec.vector.VectorDeserializeRow; +import org.apache.hadoop.hive.ql.exec.vector.VectorSerializeRow; +import org.apache.hadoop.hive.serde2.ByteStream.Output; +import org.apache.hadoop.hive.serde2.binarysortable.fast.BinarySortableSerializeWrite; +#ENDIF SERIALIZE_KEY +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.plan.OperatorDesc; +import org.apache.hadoop.hive.ql.plan.VectorDesc; +import org.apache.hadoop.hive.ql.plan.VectorGroupByDesc; +import org.apache.hive.common.util.HashCodeUtil; + +/* + * Specialized class for doing a COUNT(*) Native Vectorized GroupBy that is lookup on a single long + * using a specialized hash map. + * + Count Star + + NULL key has separate counter. + + findOrCreateLongNonZeroCountKey( + currentKey, + HashCodeUtil.calculateLongHashCode(currentKey), + count); + + */ +public class + extends VectorGroupByHashKeySingleCountOperatorBase { + + private static final long serialVersionUID = 1L; + + // Non-transient members initialized by the constructor. They cannot be final due to Kryo. + + // The above members are initialized by the constructor and must not be + // transient. + //--------------------------------------------------------------------------- + +#IF SERIALIZE_KEY + // Object that can take the single column in row in a vectorized row batch and serialized it. + // The key is not NULL. + private transient VectorSerializeRow keyVectorSerializeWrite; + + // The BinarySortable serialization of the current key. + private transient Output currentKeyOutput; + + // The BinarySortable serialization of the next key for a possible series of equal keys. + private transient Output nextKeyOutput; + +#ENDIF SERIALIZE_KEY + //--------------------------------------------------------------------------- + // Pass-thru constructors. 
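Each run of equal keys is hashed once (HashCodeUtil.calculateLongHashCode / calculateBytesHashCode) and the hash is passed into findOrCreate* so it is never recomputed inside the table. A sketch using a generic 64-bit mixer -- not necessarily HashCodeUtil's exact algorithm -- and a power-of-two slot mask:

    /** Sketch: hash a long key once per run, then mask into a power-of-two table. */
    public final class LongHashSketch {
      // A Murmur-style 64-bit finalizer; HashCodeUtil's exact mixing may differ.
      static int hashLong(long key) {
        long h = key;
        h ^= h >>> 33;                    // spread high bits into low bits
        h *= 0xff51afd7ed558ccdL;
        h ^= h >>> 33;
        return (int) h;
      }

      public static void main(String[] args) {
        long key = 123456789L;
        int slot = hashLong(key) & (1024 - 1);  // power-of-two table: mask, don't mod
        System.out.println("hash=" + hashLong(key) + " slot=" + slot);
      }
    }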
+ // + + public () { + super(); + } + + public (CompilationOpContext ctx, OperatorDesc conf, + VectorizationContext vContext, VectorDesc vectorDesc) throws HiveException { + super(ctx, conf, vContext, vectorDesc); + } + + @Override + protected void initializeOp(Configuration hconf) throws HiveException { + super.initializeOp(hconf); + +#IF LONG_KEY + allocateBucketArray(LONG_NON_ZERO_COUNT_ENTRY_SIZE); +#ELSE + allocateBucketArray(BYTES_ENTRY_SIZE); +#ENDIF LONG_KEY +#IF SERIALIZE_KEY + + keyVectorSerializeWrite = + new VectorSerializeRow( + new BinarySortableSerializeWrite(1)); + TypeInfo[] typeInfos = new TypeInfo[] { groupByKeyExpressions[0].getOutputTypeInfo() }; + int[] columnMap = new int[] { groupByKeyExpressions[0].getOutputColumnNum() }; + keyVectorSerializeWrite.init(typeInfos, columnMap); + + currentKeyOutput = new Output(); + nextKeyOutput = new Output(); +#ENDIF SERIALIZE_KEY + } + +#COMMENT=========================================================================================== +#COMMENT +#COMMENT These code line snippets are intended to: +#COMMENT 1) Reduce code duplication +#COMMENT 2) To not incur the cost of calling methods or having abstract objects +#COMMENT 3) Or, to have to to parameterize for methods. +#COMMENT 4) Separate the the key variation variables and logic from the common loop logic. +#COMMENT +#INCLUDE GroupByHashSingleKeyCommonLines +#COMMENT +#COMMENT ******************************************************************************************* +#COMMENT The current series of equal keys ended -- find or create the hash table entry and +#COMMENT add or initialize it with the count. All variations. +#COMMENT +#BEGIN_LINES CURRENT_COUNT_STAR_ENDED +#IF LONG_KEY + findOrCreateLongNonZeroCountKey( + currentKey, + HashCodeUtil.calculateLongHashCode(currentKey), + count); +#ENDIF LONG_KEY +#IF STRING_KEY + findOrCreateBytesKey( + currentKey, currentKeyStart, currentKeyLength, + HashCodeUtil.calculateBytesHashCode( + currentKey, currentKeyStart, currentKeyLength), + count); +#ENDIF STRING_KEY +#IF SERIALIZE_KEY + findOrCreateBytesKey( + currentKey, 0, currentKeyLength, + HashCodeUtil.calculateBytesHashCode( + currentKey, 0, currentKeyLength), + count); +#ENDIF SERIALIZE_KEY +#END_LINES +#COMMENT +#COMMENT ******************************************************************************************* +#COMMENT After the key processing loop for a batch of no NULL keys, find or create the hash table +#COMMENT entry and add or initialize it with the count. All variations. +#COMMENT +#BEGIN_LINES LAST_NO_NULLS_COUNT_STAR +#IF LONG_KEY + findOrCreateLongNonZeroCountKey( + currentKey, + HashCodeUtil.calculateLongHashCode(currentKey), + count); +#ENDIF LONG_KEY +#IF STRING_KEY + findOrCreateBytesKey( + currentKey, currentKeyStart, currentKeyLength, + HashCodeUtil.calculateBytesHashCode( + currentKey, currentKeyStart, currentKeyLength), + count); +#ENDIF STRING_KEY +#IF SERIALIZE_KEY + findOrCreateBytesKey( + currentKey, 0, currentKeyLength, + HashCodeUtil.calculateBytesHashCode( + currentKey, 0, currentKeyLength), + count); +#ENDIF SERIALIZE_KEY +#END_LINES +#COMMENT +#COMMENT ******************************************************************************************* +#COMMENT After the key processing loop for a batch which may have NULL keys, find or create the +#COMMENT hash table entry and add or initialize it with the count. All variations. 
+#COMMENT +#BEGIN_LINES LAST_NULLS_COUNT_STAR + if (currKeyIsNull) { + haveNullKey = true; + nullKeyCount += count; + } else { +#IF LONG_KEY + findOrCreateLongNonZeroCountKey( + currentKey, + HashCodeUtil.calculateLongHashCode(currentKey), + count); +#ENDIF LONG_KEY +#IF STRING_KEY + findOrCreateBytesKey( + currentKey, currentKeyStart, currentKeyLength, + HashCodeUtil.calculateBytesHashCode( + currentKey, currentKeyStart, currentKeyLength), + count); +#ENDIF STRING_KEY +#IF SERIALIZE_KEY + findOrCreateBytesKey( + currentKey, 0, currentKeyLength, + HashCodeUtil.calculateBytesHashCode( + currentKey, 0, currentKeyLength), + count); +#ENDIF SERIALIZE_KEY + } +#END_LINES +#COMMENT=========================================================================================== +#COMMENT + /* + * Repeating key case -- either all NULL keys or all same non-NULL key. + * + * For all NULL keys case we note NULL key exists AND count it. + */ + @Override + protected void handleRepeatingKey(VectorizedRowBatch batch, final int inputLogicalSize, + keyColVector) throws HiveException, IOException { + + if (keyColVector.noNulls || !keyColVector.isNull[0]) { +#IF LONG_KEY + final long repeatingKey = keyColVector.vector[0]; + findOrCreateLongNonZeroCountKey( + repeatingKey, + HashCodeUtil.calculateLongHashCode(repeatingKey), + inputLogicalSize); +#ENDIF LONG_KEY +#IF STRING_KEY + final byte[] repeatingKey = keyColVector.vector[0]; + final int repeatingKeyStart = keyColVector.start[0]; + final int repeatingKeyLength = keyColVector.length[0]; + findOrCreateBytesKey( + repeatingKey, repeatingKeyStart, repeatingKeyLength, + HashCodeUtil.calculateBytesHashCode( + repeatingKey, repeatingKeyStart, repeatingKeyLength), + inputLogicalSize); +#ENDIF STRING_KEY +#IF SERIALIZE_KEY + keyVectorSerializeWrite.setOutput(currentKeyOutput); + keyVectorSerializeWrite.serializeWrite(batch, 0); + byte[] repeatingKey = currentKeyOutput.getData(); + int repeatingKeyLength = currentKeyOutput.getLength(); + findOrCreateBytesKey( + repeatingKey, 0, repeatingKeyLength, + HashCodeUtil.calculateBytesHashCode( + repeatingKey, 0, repeatingKeyLength), + inputLogicalSize); +#ENDIF SERIALIZE_KEY + } else { + + // We note we encountered a repeating NULL key. + haveNullKey = true; + nullKeyCount += inputLogicalSize; + } + } + + /* + * Logical batch processing (i.e. selectedInUse is true since rows were filtered out) for + * NO NULLS key case. + * + * Do find/create on each key with count count. + */ + @Override + protected void handleLogicalNoNullsKey(VectorizedRowBatch batch, final int inputLogicalSize, + keyColVector) throws HiveException, IOException { + + int[] selected = batch.selected; + +#USE_LINES KEY_VECTOR_VARIABLES + +#USE_LINES LOGICAL_NO_NULLS_CURRENT_KEY_VARIABLES + + int count = 1; + + for (int logicalIndex = 1; logicalIndex < inputLogicalSize; logicalIndex++) { + final int batchIndex = selected[logicalIndex]; + +#USE_LINES GET_NEXT_KEY +#USE_LINES IF_NEXT_EQUALS_CURRENT + + count++; + } else { + + // Current key ended. +#USE_LINES CURRENT_COUNT_STAR_ENDED + + // New current key. +#USE_LINES NEW_CURRENT_KEY + + count = 1; + } + } +#USE_LINES LAST_NO_NULLS_COUNT_STAR + } + + /* + * Logical batch processing (i.e. selectedInUse is true since rows were filtered out) for + * NULLS key case. + * + * For all NULL keys we note NULL key exists AND count it count. + * + * Do find/create on each non-NULL key with count count. 
+ */ + @Override + protected void handleLogicalNullsKey(VectorizedRowBatch batch, final int inputLogicalSize, + keyColVector) throws HiveException, IOException { + + int[] selected = batch.selected; + +#USE_LINES KEY_VECTOR_VARIABLES + +#USE_LINES LOGICAL_NULLS_CURRENT_KEY_VARIABLES + + int count = 1; + + for (int logicalIndex = 1; logicalIndex < inputLogicalSize; logicalIndex++) { + final int batchIndex = selected[logicalIndex]; + + if (keyIsNull[batchIndex]) { + + if (currKeyIsNull) { + + count++; + } else { + + // Current non-NULL key ended. +#USE_LINES CURRENT_COUNT_STAR_ENDED +2 + + // New NULL key. + currKeyIsNull = true; + count = 1; + } + + } else { + +#USE_LINES GET_NEXT_KEY +2 + if (currKeyIsNull) { + + // Current NULL key ended. + currKeyIsNull = false; + + haveNullKey = true; + nullKeyCount += count; + + // New non-NULL key. +#USE_LINES NEW_CURRENT_KEY +2 + + count = 1; +#USE_LINES ELSE_IF_NEXT_EQUALS_CURRENT +2 + + count++; + } else { + + // Current non-NULL key ended. +#USE_LINES CURRENT_COUNT_STAR_ENDED +2 + + // New non-NULL key. +#USE_LINES NEW_CURRENT_KEY +2 + + count = 1; + } + } + } + // Handle last key. +#USE_LINES LAST_NULLS_COUNT_STAR + } + + /* + * Physical batch processing (i.e. selectedInUse is false since NO rows were filtered out) for + * NO NULLS key case. + * + * (For remaining comments for handleLogicalNoNullsKey). + */ + @Override + protected void handlePhysicalNoNullsKey(VectorizedRowBatch batch, final int inputLogicalSize, + keyColVector) throws HiveException, IOException { + +#USE_LINES KEY_VECTOR_VARIABLES + +#USE_LINES PHYSICAL_NO_NULLS_CURRENT_KEY_VARIABLES + + int count = 1; + + for (int batchIndex = 1; batchIndex < inputLogicalSize; batchIndex++) { + +#USE_LINES GET_NEXT_KEY +#USE_LINES IF_NEXT_EQUALS_CURRENT + + count++; + } else { + + // Current key ended. +#USE_LINES CURRENT_COUNT_STAR_ENDED + + // New current key. +#USE_LINES NEW_CURRENT_KEY + + count = 1; + } + } + // Handle last key. +#USE_LINES LAST_NO_NULLS_COUNT_STAR + } + + /* + * Physical batch processing (i.e. selectedInUse is false since NO rows were filtered out) for + * NULLS key case. + * + * (For remaining comments for handleLogicalNullsKey). + * + */ + @Override + protected void handlePhysicalNullsKey(VectorizedRowBatch batch, final int inputLogicalSize, + keyColVector) throws HiveException, IOException { + +#USE_LINES KEY_VECTOR_VARIABLES + +#USE_LINES PHYSICAL_NULLS_CURRENT_KEY_VARIABLES + + int count = 1; + + for (int batchIndex = 1; batchIndex < inputLogicalSize; batchIndex++) { + + if (keyIsNull[batchIndex]) { + + if (currKeyIsNull) { + count++; + } else { + + // Current non-NULL key ended. +#USE_LINES CURRENT_COUNT_STAR_ENDED +2 + + // New NULL key. + currKeyIsNull = true; + count = 1; + } + + } else { + +#USE_LINES GET_NEXT_KEY +2 + if (currKeyIsNull) { + + // Current NULL key ended. + currKeyIsNull = false; + + haveNullKey = true; + nullKeyCount += count; + + // New non-NULL key. +#USE_LINES NEW_CURRENT_KEY +2 + + count = 1; +#USE_LINES ELSE_IF_NEXT_EQUALS_CURRENT +2 + + count++; + } else { + + // Current non-NULL key ended. +#USE_LINES CURRENT_COUNT_STAR_ENDED +2 + + // New non-NULL key. +#USE_LINES NEW_CURRENT_KEY +2 + + count = 1; + } + } + } + // Handle last key. 
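A worked example of the COUNT(*) contrast: unlike COUNT(key), the NULL-key group accumulates a real row count (nullKeyCount += count) because COUNT(*) counts rows, not values (data values below are made up):

    import java.util.LinkedHashMap;
    import java.util.Map;

    /** COUNT(*) GROUP BY key: the NULL-key group gets a real row count. */
    public final class CountStarSemantics {
      public static void main(String[] args) {
        Long[] keys = { 7L, null, 7L, -3L, null, 7L };
        Map<Long, Long> counts = new LinkedHashMap<>();
        long nullKeyCount = 0;
        for (Long k : keys) {
          if (k == null) {
            nullKeyCount++;              // COUNT(*) counts the row regardless of NULL
          } else {
            counts.merge(k, 1L, Long::sum);
          }
        }
        System.out.println(counts + " NULL=" + nullKeyCount); // {7=3, -3=1} NULL=2
      }
    }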
+#USE_LINES LAST_NULLS_COUNT_STAR + } + + @Override + protected void outputSingleKeyAndCountPairs( + ColumnVector keyColumnVector, + LongColumnVector countColumnVector) throws HiveException { + +#IF LONG_KEY + outputLongNonZeroKeyAndCountPairs( + (LongColumnVector) keyColumnVector, countColumnVector); +#ENDIF LONG_KEY +#IF STRING_KEY + doOutputStringKeyAndCountPairs( + (BytesColumnVector) keyColumnVector, countColumnVector); +#ENDIF STRING_KEY +#IF SERIALIZE_KEY + doOutputSerializeKeyAndCountPairs( + keyColumnVector, countColumnVector); +#ENDIF SERIALIZE_KEY + } +} \ No newline at end of file diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/groupby/VectorGroupByCommon.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/groupby/VectorGroupByCommon.java new file mode 100644 index 0000000..5cce41e --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/groupby/VectorGroupByCommon.java @@ -0,0 +1,170 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.exec.vector.groupby; + +import java.lang.reflect.Constructor; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Map; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.ql.CompilationOpContext; +import org.apache.hadoop.hive.ql.exec.Operator; +import org.apache.hadoop.hive.ql.exec.Utilities; +import org.apache.hadoop.hive.ql.exec.vector.ColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.ColumnVector.Type; +import org.apache.hadoop.hive.ql.exec.vector.VectorAggregationDesc; +import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext; +import org.apache.hadoop.hive.ql.exec.vector.VectorizationContextRegion; +import org.apache.hadoop.hive.ql.exec.vector.VectorizationOperator; +import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.plan.BaseWork; +import org.apache.hadoop.hive.ql.plan.GroupByDesc; +import org.apache.hadoop.hive.ql.plan.OperatorDesc; +import org.apache.hadoop.hive.ql.plan.VectorDesc; +import org.apache.hadoop.hive.ql.plan.VectorGroupByDesc; +import org.apache.hadoop.hive.ql.plan.VectorGroupByInfo; +import org.apache.hadoop.hive.ql.plan.VectorGroupByInfo.AggregationVariation; +import org.apache.hadoop.hive.ql.plan.VectorGroupByInfo.SingleCountAggregation; +import org.apache.hadoop.hive.ql.plan.VectorGroupByInfo.HashTableKeyType; +import org.apache.hadoop.hive.ql.plan.api.OperatorType; +import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.common.base.Preconditions; + +import 
org.apache.hadoop.hive.ql.exec.vector.VectorizedBatchUtil;
+
+/**
+ * This class is the common operator base class of Native Vectorized GroupBy; it holds the
+ * common initialization logic.
+ */
+public abstract class VectorGroupByCommon
+ extends Operator<GroupByDesc>
+ implements VectorizationContextRegion, VectorizationOperator {
+
+ private static final long serialVersionUID = 1L;
+ private static final String CLASS_NAME = VectorGroupByCommon.class.getName();
+ private static final Logger LOG = LoggerFactory.getLogger(CLASS_NAME);
+
+ protected VectorGroupByDesc vectorDesc;
+
+ protected VectorGroupByInfo vectorGroupByInfo;
+
+ protected VectorizationContext vContext;
+
+ // Create a new outgoing vectorization context because the column name map will change.
+ protected VectorizationContext vOutContext;
+
+ protected VectorExpression[] groupByKeyExpressions;
+
+ protected VectorAggregationDesc[] vectorAggregationDescs;
+
+ protected AggregationVariation aggregationVariation;
+ protected SingleCountAggregation singleCountAggregation;
+
+ // The above members are initialized by the constructor and must not be
+ // transient.
+ //---------------------------------------------------------------------------
+
+ public static final int INT_PER_LONG_COUNT = Long.SIZE / Integer.SIZE;
+
+ // For debug tracing: the name of the map or reduce task.
+ protected transient String taskName;
+
+ // Debug display.
+ protected transient long batchCounter;
+
+ public VectorGroupByCommon() {
+ super();
+ }
+
+ public VectorGroupByCommon(CompilationOpContext ctx, OperatorDesc conf,
+ VectorizationContext vContext, VectorDesc vectorDesc) throws HiveException {
+ super(ctx);
+
+ GroupByDesc desc = (GroupByDesc) conf;
+ this.conf = desc;
+ this.vectorDesc = (VectorGroupByDesc) vectorDesc;
+ vectorGroupByInfo = this.vectorDesc.getVectorGroupByInfo();
+
+ this.vContext = vContext;
+
+ vOutContext = new VectorizationContext(getName(), desc.getOutputColumnNames());
+
+ groupByKeyExpressions = this.vectorDesc.getKeyExpressions();
+
+ vectorAggregationDescs = this.vectorDesc.getVecAggrDescs();
+
+ aggregationVariation = vectorGroupByInfo.getAggregationVariation();
+ singleCountAggregation = vectorGroupByInfo.getSingleCountAggregation();
+ }
+
+ @Override
+ protected void initializeOp(Configuration hconf) throws HiveException {
+ super.initializeOp(hconf);
+
+ // Determine the name of our map or reduce task for debug tracing.
+ BaseWork work = Utilities.getMapWork(hconf);
+ if (work == null) {
+ work = Utilities.getReduceWork(hconf);
+ }
+ taskName = work.getName();
+
+ batchCounter = 0;
+ }
+
+ /**
+ * Implements the getName function for the Node Interface.
+ * + * @return the name of the operator + */ + @Override + public String getName() { + return getOperatorName(); + } + + static public String getOperatorName() { + return "GBY"; + } + + @Override + public VectorizationContext getOutputVectorizationContext() { + return vOutContext; + } + + @Override + public VectorizationContext getInputVectorizationContext() { + return vContext; + } + + @Override + public VectorDesc getVectorDesc() { + return vectorDesc; + } + + @Override + public OperatorType getType() { + return OperatorType.GROUPBY; + } +} \ No newline at end of file diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/groupby/VectorGroupByCommonOutput.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/groupby/VectorGroupByCommonOutput.java new file mode 100644 index 0000000..277aa25 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/groupby/VectorGroupByCommonOutput.java @@ -0,0 +1,169 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.exec.vector.groupby; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.ql.CompilationOpContext; +import org.apache.hadoop.hive.ql.exec.vector.ColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.ColumnVector.Type; +import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.StructColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext; +import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; +import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatchCtx; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.plan.OperatorDesc; +import org.apache.hadoop.hive.ql.plan.VectorDesc; +import org.apache.hadoop.hive.ql.plan.VectorGroupByInfo.AggregationVariation; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; +import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory; +import org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.StructField; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption; +import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo; +import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; +import 
org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; +import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.common.base.Preconditions; + +import org.apache.hadoop.hive.ql.exec.vector.VectorizedBatchUtil; + +/** + * This class is the common operator class of Native Vectorized GroupBy for output generation. + * It takes the aggregation results and fills up the output batch. + */ +public abstract class VectorGroupByCommonOutput + extends VectorGroupByCommon { + + private static final long serialVersionUID = 1L; + private static final String CLASS_NAME = VectorGroupByCommonOutput.class.getName(); + private static final Logger LOG = LoggerFactory.getLogger(CLASS_NAME); + + // The above members are initialized by the constructor and must not be + // transient. + //--------------------------------------------------------------------------- + + protected transient VectorizedRowBatch outputBatch; + + private transient VectorizedRowBatchCtx vrbCtx; + + private transient TypeInfo[] outputTypes; + + private transient StandardStructObjectInspector standardOutputObjInspector; + + //--------------------------------------------------------------------------- + // Pass-thru constructors. + // + + public VectorGroupByCommonOutput() { + super(); + } + + public VectorGroupByCommonOutput(CompilationOpContext ctx, OperatorDesc conf, + VectorizationContext vContext, VectorDesc vectorDesc) throws HiveException { + super(ctx, conf, vContext, vectorDesc); + } + + @Override + protected void initializeOp(Configuration hconf) throws HiveException { + super.initializeOp(hconf); + + List<ObjectInspector> objectInspectors = new ArrayList<ObjectInspector>(); + + List<String> outputFieldNames = conf.getOutputColumnNames(); + + final int keyCount = (groupByKeyExpressions == null ? 0 : groupByKeyExpressions.length); + final int aggrCount = (vectorAggregationDescs == null ? 0 : vectorAggregationDescs.length); + outputTypes = new TypeInfo[keyCount + aggrCount]; + int outputTypesIndex = 0; + + for (int i = 0; i < keyCount; ++i) { + TypeInfo outputTypeInfo = groupByKeyExpressions[i].getOutputTypeInfo(); + outputTypes[outputTypesIndex++] = outputTypeInfo; + ObjectInspector objInsp = + TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo( + outputTypeInfo); + objectInspectors.add(objInsp); + } + + for (int i = 0; i < aggrCount; ++i) { + TypeInfo outputTypeInfo = vectorAggregationDescs[i].getOutputTypeInfo(); + outputTypes[outputTypesIndex++] = outputTypeInfo; + ObjectInspector objInsp = + TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(outputTypeInfo); + objectInspectors.add(objInsp); + } + + standardOutputObjInspector = + ObjectInspectorFactory.getStandardStructObjectInspector(outputFieldNames, objectInspectors); + outputObjInspector = standardOutputObjInspector; + + /* + * Set up the output batch and vectorization context for downstream operators. + * + * NOTE: If we cannot do vectorized output, we still use the outputBatch while + * pulling information out of the hash table. In forwardOutputBatch, we extract rows + * and forward them one by one...
+ */ + vrbCtx = new VectorizedRowBatchCtx(); + vrbCtx.init(standardOutputObjInspector, vOutContext.getScratchColumnTypeNames()); + outputBatch = vrbCtx.createVectorizedRowBatch(); + } + + public void forwardOutputBatch(VectorizedRowBatch outputBatch) throws HiveException { + + forward(outputBatch, null); + + outputBatch.reset(); + } + + /** + * Copy all of the keys and aggregations to the output batch. + */ + protected abstract void outputGroupBy() throws HiveException; + + /** + * On close, make sure a partially filled overflow batch gets forwarded. + */ + @Override + public void closeOp(boolean aborted) throws HiveException { + super.closeOp(aborted); + if (!aborted) { + outputGroupBy(); + if (outputBatch.size > 0) { + forwardOutputBatch(outputBatch); + } + } + LOG.debug("VectorGroupByCommonOutputOperator closeOp " + batchCounter + " batches processed"); + } +} \ No newline at end of file diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/groupby/hash/VectorGroupByHashCommon.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/groupby/hash/VectorGroupByHashCommon.java new file mode 100644 index 0000000..1733713 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/groupby/hash/VectorGroupByHashCommon.java @@ -0,0 +1,102 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.exec.vector.groupby.hash; + +import java.util.ArrayList; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.ql.CompilationOpContext; +import org.apache.hadoop.hive.ql.exec.vector.ColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext; +import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; +import org.apache.hadoop.hive.ql.exec.vector.groupby.VectorGroupByCommonOutput; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.plan.OperatorDesc; +import org.apache.hadoop.hive.ql.plan.VectorDesc; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * This class is common hash operator class of Native Vectorized GroupBy for hash related + * initialization logic. + */ +public abstract class VectorGroupByHashCommon + extends VectorGroupByCommonOutput { + + private static final long serialVersionUID = 1L; + private static final String CLASS_NAME = VectorGroupByHashCommon.class.getName(); + private static final Logger LOG = LoggerFactory.getLogger(CLASS_NAME); + + // Non-transient members initialized by the constructor. They cannot be final due to Kryo. + + // The above members are initialized by the constructor and must not be + // transient. 
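To make the memory budgeting in the initializeOp below concrete: the hash table budget is the GroupBy memory percentage applied to the available memory (or to the test override), and floorPowerOf2 then reduces that budget to a power-of-2 exponent used for slot array sizing. A minimal standalone sketch of the same arithmetic; the 0.5 percentage and the 1 GB figure are assumptions for illustration, not values from this patch:

    // Sketch of the hash table memory budget arithmetic; numbers are illustrative.
    public class HashMemoryBudgetSketch {

      // Mirrors VectorGroupByHashCommon.floorPowerOf2: returns floor(log2(a)).
      static int floorPowerOf2(long a) {
        return (a == 0 ? 0 : Long.SIZE - Long.numberOfLeadingZeros(a) - 1);
      }

      public static void main(String[] args) {
        final float memoryPercentage = 0.5f;       // assumed GroupBy memory percentage
        final long maxMemoryAvailable = 1L << 30;  // assumed 1 GB available
        final long maxHashTableMemory = (long) (memoryPercentage * maxMemoryAvailable);
        // 512 MB budget -> exponent 29, the largest power of 2 <= the budget.
        System.out.println(maxHashTableMemory + " -> 2^" + floorPowerOf2(maxHashTableMemory));
      }
    }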
+ //--------------------------------------------------------------------------- + + protected transient long maxHashTableMemory; + + protected transient int floorPowerOf2MaxHashTableMemory; + + //--------------------------------------------------------------------------- + // Pass-thru constructors. + // + + public VectorGroupByHashCommon() { + super(); + } + + public VectorGroupByHashCommon(CompilationOpContext ctx, OperatorDesc conf, + VectorizationContext vContext, VectorDesc vectorDesc) throws HiveException { + super(ctx, conf, vContext, vectorDesc); + } + + @Override + protected void initializeOp(Configuration hconf) throws HiveException { + super.initializeOp(hconf); + + final float memoryPercentage = conf.getGroupByMemoryUsage(); + final int testMaxMemoryAvailable = vectorGroupByInfo.getTestGroupByMaxMemoryAvailable(); + final long maxMemoryAvailable = + (testMaxMemoryAvailable == -1 ? + conf.getMaxMemoryAvailable() : testMaxMemoryAvailable); + maxHashTableMemory = (long) (memoryPercentage * maxMemoryAvailable); + floorPowerOf2MaxHashTableMemory = floorPowerOf2(maxHashTableMemory); + } + + /* + * Return the exponent of the largest power of 2 that is less than or equal to the value, + * i.e. floor(log2(a)). Note that the exponent is returned, not the power of 2 itself. + * + * Example: + * 100000b = 2^5 = 32, + * where Long.numberOfLeadingZeros returns (64 - 6) = 58 + * and the result = 64 - 58 - 1 = 5. + * + * Replacing any set of lower 0's with 1's doesn't change the result. + * Or, numbers 32 to 63 return 5. + * + */ + public static int floorPowerOf2(long a) { + if (a == 0) { + return 0; + } + final int floorLeadingZerosCount = Long.numberOfLeadingZeros(a); + final int result = Long.SIZE - floorLeadingZerosCount - 1; + return result; + } +} \ No newline at end of file diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/groupby/hash/VectorGroupByHashOperatorBase.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/groupby/hash/VectorGroupByHashOperatorBase.java new file mode 100644 index 0000000..ca362d2 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/groupby/hash/VectorGroupByHashOperatorBase.java @@ -0,0 +1,172 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ + +package org.apache.hadoop.hive.ql.exec.vector.groupby.hash; + +import java.io.IOException; +import java.util.ArrayList; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.ql.CompilationOpContext; +import org.apache.hadoop.hive.ql.exec.vector.ColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext; +import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; +import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.plan.OperatorDesc; +import org.apache.hadoop.hive.ql.plan.VectorDesc; +import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo; +import org.apache.hive.common.util.HashCodeUtil; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.common.base.Preconditions; + +import org.apache.hadoop.hive.ql.exec.vector.VectorizedBatchUtil; + +/** + * This class is common hash operator class of Native Vectorized GroupBy with common operator + * logic for checking key limits and the common process method logic. + */ +public abstract class VectorGroupByHashOperatorBase + extends VectorGroupByHashTable { + + private static final long serialVersionUID = 1L; + private static final String CLASS_NAME = VectorGroupByHashOperatorBase.class.getName(); + private static final Logger LOG = LoggerFactory.getLogger(CLASS_NAME); + + // Non-transient members initialized by the constructor. They cannot be final due to Kryo. + + // The above members are initialized by the constructor and must not be + // transient. + //--------------------------------------------------------------------------- + + private long flushAndStartOverCount; + + //--------------------------------------------------------------------------- + // Pass-thru constructors. + // + + public VectorGroupByHashOperatorBase() { + super(); + } + + public VectorGroupByHashOperatorBase(CompilationOpContext ctx, OperatorDesc conf, + VectorizationContext vContext, VectorDesc vectorDesc) throws HiveException { + super(ctx, conf, vContext, vectorDesc); + } + + @Override + protected void initializeOp(Configuration hconf) throws HiveException { + super.initializeOp(hconf); + + flushAndStartOverCount = 0; + } + + public long getFlushAndStartOverCount() { + return flushAndStartOverCount; + } + + protected void checkKeyLimitOncePerBatch(final int inputLogicalSize) + throws HiveException, IOException { + + /* + * Check the hash table key limit for doing the worst case of adding all keys outside the + * inner loop for better performance. + */ + if (keyCount + inputLogicalSize > hashTableKeyCountLimit || + checkLargestNumberOfStepsAboveThresold()) { + flushAndStartOverCount++; + flushAndStartOver(); + if (keyCount + inputLogicalSize > hashTableKeyCountLimit) { + raise2ndHitOutOfStorage(); + } + } + } + + protected void doBeforeMainLoopWork(final int inputLogicalSize) + throws HiveException, IOException { + + /* + * If the hash table has less than the worst-case inputLogicalSize keys that + * could be added, then flush the current hash table entries and clear it. 
+ */ + checkKeyLimitOncePerBatch(inputLogicalSize); + } + + protected abstract void doMainLoop(VectorizedRowBatch batch, final int inputLogicalSize) + throws HiveException, IOException; + + @Override + public void process(Object row, int tag) throws HiveException { + + try { + VectorizedRowBatch batch = (VectorizedRowBatch) row; + + batchCounter++; + + final int inputLogicalSize = batch.size; + + if (inputLogicalSize == 0) { + if (LOG.isDebugEnabled()) { + LOG.debug(CLASS_NAME + " batch #" + batchCounter + " empty"); + } + return; + } + + /* + * Perform any key expressions. Results will go into scratch columns. + */ + if (groupByKeyExpressions != null) { + for (VectorExpression ve : groupByKeyExpressions) { + ve.evaluate(batch); + } + } + + doBeforeMainLoopWork(inputLogicalSize); + + doMainLoop(batch, inputLogicalSize); + + } catch (Exception e) { + throw new HiveException(e); + } + } + + protected void raise2ndHitOutOfStorage() throws HiveException { + throw new HiveException( + "After flushing hash table and clearing, there still isn't enough storage?"); + } + + protected void flushAndStartOver() throws HiveException, IOException { + + outputGroupByAndClearAll(); + + // reallocateHashTable(); + } + + @Override + public void outputGroupByAndClearAll() throws HiveException { + + outputGroupBy(); + if (outputBatch.size > 0) { + forwardOutputBatch(outputBatch); + } + clearHashTable(); + } +} \ No newline at end of file diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/groupby/hash/VectorGroupByHashTable.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/groupby/hash/VectorGroupByHashTable.java new file mode 100644 index 0000000..7db80ba --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/groupby/hash/VectorGroupByHashTable.java @@ -0,0 +1,163 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.exec.vector.groupby.hash; + +import java.io.IOException; +import java.util.Arrays; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.ql.CompilationOpContext; +import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext; +import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.plan.OperatorDesc; +import org.apache.hadoop.hive.ql.plan.VectorDesc; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.common.base.Preconditions; + +import org.apache.hadoop.hive.ql.exec.vector.VectorizedBatchUtil; + +/** + * This class is common hash operator class of Native Vectorized GroupBy for the hash tables. 
+ */ +public abstract class VectorGroupByHashTable + extends VectorGroupByHashCommon { + + private static final long serialVersionUID = 1L; + private static final String CLASS_NAME = VectorGroupByHashTable.class.getName(); + private static final Logger LOG = LoggerFactory.getLogger(CLASS_NAME); + + // The above members are initialized by the constructor and must not be + // transient. + //--------------------------------------------------------------------------- + + // The logical size and power of 2 mask of the hash table. + protected transient int logicalHashBucketCount; + protected transient int logicalHashBucketMask; + + // The maximum number of keys we'll keep in the hash table before flushing. + protected transient int hashTableKeyCountLimit; + + protected transient long[] slotMultiples; + + protected transient int keyCount; + protected transient int largestNumberOfSteps; + + //--------------------------------------------------------------------------- + // Pass-thru constructors. + // + + public VectorGroupByHashTable() { + super(); + } + + public VectorGroupByHashTable(CompilationOpContext ctx, OperatorDesc conf, + VectorizationContext vContext, VectorDesc vectorDesc) throws HiveException { + super(ctx, conf, vContext, vectorDesc); + } + + @Override + protected void initializeOp(Configuration hconf) throws HiveException { + super.initializeOp(hconf); + } + + protected void clearHashTable() { + Arrays.fill(slotMultiples, 0, slotPhysicalArraySize, 0); + keyCount = 0; + largestNumberOfSteps = 0; + } + + private static final int LARGEST_NUMBER_OF_STEPS_THRESHOLD = 6; + + public boolean checkLargestNumberOfStepsAboveThresold() { + return (largestNumberOfSteps > LARGEST_NUMBER_OF_STEPS_THRESHOLD); + } + + public void outputGroupByAndClearAll() throws HiveException, IOException { + + outputGroupBy(); + + clearHashTable(); + } + + // The number of longs in the hash table slot array. It is the logical size * entries per slot. + protected int slotPhysicalArraySize; + + // Since the maximum int value is 2^31 - 1, a full 2^31 element array cannot be indexed, so we + // use one less than the number of Integer bits. + private static final int MAX_POWER_OF_2_FOR_INT_INDEXING = Integer.SIZE - 1; + + // Make sure we have comfortable room for at least one batch of new keys to support the + // VectorGroupByHashOperatorBase.checkKeyLimitOncePerBatch method. + private static final int MIN_POWER_OF_2_SIZE = + floorPowerOf2(VectorizedRowBatch.DEFAULT_SIZE * 16L); + + // An arbitrary factor to divide the slot table size by to get the key count limit. + // Hitting the key count limit will cause the hash table to be flushed to Reduce and cleared + // for refilling. + private static final int KEY_COUNT_FACTOR = 8; + + /* + * For now, we are just allocating the slot table array. + * FUTURE: We'll need to revisit these calculations when we support STRING keys. + */ + protected void allocateBucketArray(int multiplier) { + + // No matter how much memory they want to give us, our array is limited to int indexing. + int maxPowerOf2Memory = + Math.min(floorPowerOf2MaxHashTableMemory, MAX_POWER_OF_2_FOR_INT_INDEXING); + + // UNDONE: Artificially limit for now... + maxPowerOf2Memory = Math.min(maxPowerOf2Memory, 25); + + final int powerOf2Memory = + Math.max(maxPowerOf2Memory, MIN_POWER_OF_2_SIZE); + + /* + * CONCERN: + * Do we really want a hash table to use the maximum supplied memory immediately? + * That could waste memory that other operators could use. And, it could cause Java GC + * issues because of how large the single slot table array is.
Large hash tables + * with small key sets could cause lots of unnecessary cold RAM hits. There is a tension + * here, of course. Too small a table and there will be more insert collisions. + * + * In contrast, the current VectorGroupByOperator and GroupByOperator classes use a + * Java HeapMap which automatically grows over time. + * + * The issues here are similar to MapJoin, except we have the possibility of using a smaller + * hash table and flushing everything to Reduce, then creating a larger slot table instead + * of zeroing the current one. MapJoin cannot flush -- it either needs to expand its + * hash tables to hold everything or spill some of the data to secondary storage (Hybrid Grace). + */ + + slotPhysicalArraySize = 1 << powerOf2Memory; + + logicalHashBucketCount = slotPhysicalArraySize / multiplier; + logicalHashBucketMask = logicalHashBucketCount - 1; + + hashTableKeyCountLimit = logicalHashBucketCount / KEY_COUNT_FACTOR; + + slotMultiples = new long[slotPhysicalArraySize]; + + keyCount = 0; + largestNumberOfSteps = 0; + } +} \ No newline at end of file diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/groupby/hash/byteskey/duplicatereduction/VectorGroupByHashBytesKeyDuplicateReductionTable.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/groupby/hash/byteskey/duplicatereduction/VectorGroupByHashBytesKeyDuplicateReductionTable.java new file mode 100644 index 0000000..d66e917 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/groupby/hash/byteskey/duplicatereduction/VectorGroupByHashBytesKeyDuplicateReductionTable.java @@ -0,0 +1,171 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ + +package org.apache.hadoop.hive.ql.exec.vector.groupby.hash.byteskey.duplicatereduction; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collection; +import java.util.concurrent.Future; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.ql.CompilationOpContext; +import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext; +import org.apache.hadoop.hive.ql.exec.vector.groupby.hash.singlekey.duplicatereduction.VectorGrouoByHashSingleKeyDuplicateReductionOperatorBase; +import org.apache.hadoop.hive.ql.exec.vector.keystore.VectorKeyStore; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.plan.OperatorDesc; +import org.apache.hadoop.hive.ql.plan.VectorDesc; +import org.apache.hadoop.hive.serde2.WriteBuffers; +import org.apache.hadoop.hive.serde2.WriteBuffers.ByteSegmentRef; +import org.apache.hive.common.util.HashCodeUtil; + +/* + * A bytes key hash table optimized for Duplicate Reduction Native Vectorized GroupBy. + */ +public abstract class VectorGroupByHashBytesKeyDuplicateReductionTable + extends VectorGrouoByHashSingleKeyDuplicateReductionOperatorBase { + + private static final long serialVersionUID = 1L; + + // The above members are initialized by the constructor and must not be + // transient. + //--------------------------------------------------------------------------- + + private transient VectorKeyStore keyStore; + + //--------------------------------------------------------------------------- + // Pass-thru constructors. + // + + public VectorGroupByHashBytesKeyDuplicateReductionTable() { + super(); + } + + public VectorGroupByHashBytesKeyDuplicateReductionTable(CompilationOpContext ctx, OperatorDesc conf, + VectorizationContext vContext, VectorDesc vectorDesc) throws HiveException { + super(ctx, conf, vContext, vectorDesc); + } + + @Override + protected void initializeOp(Configuration hconf) throws HiveException { + super.initializeOp(hconf); + + // UNDONE: Size? + keyStore = new VectorKeyStore(1000000); + } + + @Override + protected void clearHashTable() { + super.clearHashTable(); + + // UNDONE: Size? + keyStore = new VectorKeyStore(1000000); + } + + //------------------------------------------------------------------------------------------------ + + protected static final int BYTES_DUPLICATE_REDUCTION_ENTRY_SIZE = 2; + + public void findOrCreateBytesDuplicateReductionKey(byte[] keyBytes, int keyStart, int keyLength, + long hashCode) + throws HiveException, IOException { + + int intHashCode = (int) hashCode; + int slot = (intHashCode & logicalHashBucketMask); + long probeSlot = slot; + int i = 0; + int pairIndex; + while (true) { + pairIndex = 2 * slot; + if (slotMultiples[pairIndex] == 0) { + break; + } + if (hashCode == slotMultiples[pairIndex + 1] && + keyStore.unsafeEqualKey(slotMultiples[pairIndex], keyBytes, keyStart, keyLength)) { + // Found it! A duplicate has now been eliminated. + return; + } + // Some other key (collision) - keep probing.
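The probe step that follows advances by an increasing increment (+1, +2, +3, ...), so the k-th probe lands k*(k+1)/2 positions past the home slot; with a power-of-2 table this triangular probing eventually visits every slot. A minimal sketch of just the probe sequence, using made-up table size and home slot values:

    // Illustrative only: reproduces the probe order of the find-or-create loops.
    int logicalHashBucketMask = 64 - 1;  // assume a power-of-2 table of 64 buckets
    int slot = 25;                       // assumed home slot from the hash code
    long probeSlot = slot;
    for (int i = 0; i < 5;) {
      probeSlot += (++i);                // offsets grow: +1, +2, +3, ...
      slot = (int) (probeSlot & logicalHashBucketMask);
      System.out.println("probe " + i + " -> slot " + slot);
    }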
+ probeSlot += (++i); + if (largestNumberOfSteps < i) { + largestNumberOfSteps = i; + /* + if (checkLargestNumberOfStepsAboveThresold()) { + System.out.println( + "*DEBUG* New largestNumberOfSteps " + largestNumberOfSteps + + " logicalHashBucketCount " + logicalHashBucketCount + + " keyCount " + keyCount + + " hashCode 0x" + Integer.toHexString(intHashCode)); + } + */ + } + slot = (int) (probeSlot & logicalHashBucketMask); + } + + // First entry. + slotMultiples[pairIndex] = keyStore.add(keyBytes, keyStart, keyLength); + slotMultiples[pairIndex + 1] = hashCode; + + keyCount++; + + } + + private int countKeyPairIndex; + private WriteBuffers.Position keyReadPos; + private ByteSegmentRef keyByteSegmentRef; + + protected int initBytesKeyIterator() { + countKeyPairIndex = 0; + keyReadPos = new WriteBuffers.Position(); + keyByteSegmentRef = new ByteSegmentRef(); + return keyCount; + } + + // Read next key. + protected void readNext() { + while (true) { + final long keyRef = slotMultiples[countKeyPairIndex]; + if (keyRef != 0) { + keyStore.getKey( + keyRef, + keyByteSegmentRef, + keyReadPos); + + countKeyPairIndex += 2; + return; + } + countKeyPairIndex += 2; + } + } + + public byte[] getKeyBytes() { + return keyByteSegmentRef.getBytes(); + } + + public int getKeyBytesOffset() { + return (int) keyByteSegmentRef.getOffset(); + } + + public int getKeyBytesLength() { + return keyByteSegmentRef.getLength(); + } +} \ No newline at end of file diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/groupby/hash/byteskey/singlecount/VectorGroupByHashBytesKeySingleCountTable.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/groupby/hash/byteskey/singlecount/VectorGroupByHashBytesKeySingleCountTable.java new file mode 100644 index 0000000..12b626f --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/groupby/hash/byteskey/singlecount/VectorGroupByHashBytesKeySingleCountTable.java @@ -0,0 +1,188 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.ql.exec.vector.groupby.hash.byteskey.singlecount; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collection; +import java.util.concurrent.Future; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.ql.CompilationOpContext; +import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext; +import org.apache.hadoop.hive.ql.exec.vector.groupby.hash.VectorGroupByHashOperatorBase; +import org.apache.hadoop.hive.ql.exec.vector.groupby.hash.singlekey.singlecount.VectorGrouoByHashSingleKeySingleCountOperatorBase; +import org.apache.hadoop.hive.ql.exec.vector.keystore.VectorKeyStore; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.plan.OperatorDesc; +import org.apache.hadoop.hive.ql.plan.VectorDesc; +import org.apache.hadoop.hive.serde2.WriteBuffers; +import org.apache.hadoop.hive.serde2.WriteBuffers.ByteSegmentRef; +import org.apache.hive.common.util.HashCodeUtil; + +/* + * A bytes key hash table optimized for COUNT(key-column) and COUNT(non-key-column) + * Native Vectorized GroupBy. + */ +public abstract class VectorGroupByHashBytesKeySingleCountTable + extends VectorGrouoByHashSingleKeySingleCountOperatorBase { + + private static final long serialVersionUID = 1L; + + // The above members are initialized by the constructor and must not be + // transient. + //--------------------------------------------------------------------------- + + private transient VectorKeyStore keyStore; + + //--------------------------------------------------------------------------- + // Pass-thru constructors. + // + + public VectorGroupByHashBytesKeySingleCountTable() { + super(); + } + + public VectorGroupByHashBytesKeySingleCountTable(CompilationOpContext ctx, OperatorDesc conf, + VectorizationContext vContext, VectorDesc vectorDesc) throws HiveException { + super(ctx, conf, vContext, vectorDesc); + } + + @Override + protected void initializeOp(Configuration hconf) throws HiveException { + super.initializeOp(hconf); + + // UNDONE: Size? + keyStore = new VectorKeyStore(1000000); + } + + @Override + protected void clearHashTable() { + super.clearHashTable(); + + // UNDONE: Size? + keyStore = new VectorKeyStore(1000000); + } + + //------------------------------------------------------------------------------------------------ + + protected static final int BYTES_ENTRY_SIZE = 3; + + public void findOrCreateBytesKey(byte[] keyBytes, int keyStart, int keyLength, + long hashCode, int count) + throws HiveException, IOException { + + int intHashCode = (int) hashCode; + int slot = (intHashCode & logicalHashBucketMask); + long probeSlot = slot; + int i = 0; + int tripleIndex; + boolean isNewKey; + while (true) { + tripleIndex = 3 * slot; + if (slotMultiples[tripleIndex] == 0) { + isNewKey = true; + break; + } + if (hashCode == slotMultiples[tripleIndex + 1] && + keyStore.unsafeEqualKey(slotMultiples[tripleIndex], keyBytes, keyStart, keyLength)) { + isNewKey = false; + break; + } + // Some other key (collision) - keep probing.
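For orientation, each logical bucket of this single-count table spans three longs in slotMultiples (key store reference, full hash code, running count), while the duplicate-reduction table above uses two (key store reference, hash code). A sketch of the indexing under that layout; the helper names are invented for illustration:

    // Hypothetical helpers for the 3-long bucket layout used by this table.
    static long keyRefOf(long[] slotMultiples, int slot)   { return slotMultiples[3 * slot]; }
    static long hashCodeOf(long[] slotMultiples, int slot) { return slotMultiples[3 * slot + 1]; }
    static long countOf(long[] slotMultiples, int slot)    { return slotMultiples[3 * slot + 2]; }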
+ probeSlot += (++i); + if (largestNumberOfSteps < i) { + largestNumberOfSteps = i; + /* + if (checkLargestNumberOfStepsAboveThresold()) { + System.out.println( + "*DEBUG* New largestNumberOfSteps " + largestNumberOfSteps + + " logicalHashBucketCount " + logicalHashBucketCount + + " keyCount " + keyCount + + " hashCode 0x" + Integer.toHexString(intHashCode)); + } + */ + } + slot = (int) (probeSlot & logicalHashBucketMask); + } + + if (isNewKey) { + + // First entry. + slotMultiples[tripleIndex] = keyStore.add(keyBytes, keyStart, keyLength); + slotMultiples[tripleIndex + 1] = hashCode; + slotMultiples[tripleIndex + 2] = count; + + keyCount++; + + } else if (count > 0) { + + slotMultiples[tripleIndex + 2] += count; + } + } + + private int countKeyTripleIndex; + private WriteBuffers.Position keyReadPos; + private ByteSegmentRef keyByteSegmentRef; + private long currentCountKeyCount; + + protected int initBytesKeyIterator() { + countKeyTripleIndex = 0; + keyReadPos = new WriteBuffers.Position(); + keyByteSegmentRef = new ByteSegmentRef(); + currentCountKeyCount = 0; + return keyCount; + } + + // Read next key. + protected void readNext() { + while (true) { + final long keyRef = slotMultiples[countKeyTripleIndex]; + if (keyRef != 0) { + keyStore.getKey( + keyRef, + keyByteSegmentRef, + keyReadPos); + currentCountKeyCount = slotMultiples[countKeyTripleIndex + 2]; + + countKeyTripleIndex += 3; + return; + } + countKeyTripleIndex += 3; + } + } + + public byte[] getKeyBytes() { + return keyByteSegmentRef.getBytes(); + } + + public int getKeyBytesOffset() { + return (int) keyByteSegmentRef.getOffset(); + } + + public int getKeyBytesLength() { + return keyByteSegmentRef.getLength(); + } + + public long getCount() { + return currentCountKeyCount; + } +} \ No newline at end of file diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/groupby/hash/longkey/duplicatereduction/VectorGroupByHashLongKeyDuplicateReductionTable.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/groupby/hash/longkey/duplicatereduction/VectorGroupByHashLongKeyDuplicateReductionTable.java new file mode 100644 index 0000000..e64f07d --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/groupby/hash/longkey/duplicatereduction/VectorGroupByHashLongKeyDuplicateReductionTable.java @@ -0,0 +1,179 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.ql.exec.vector.groupby.hash.longkey.duplicatereduction; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collection; +import java.util.concurrent.Future; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.ql.CompilationOpContext; +import org.apache.hadoop.hive.ql.exec.vector.ColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext; +import org.apache.hadoop.hive.ql.exec.vector.groupby.hash.VectorGroupByHashOperatorBase; +import org.apache.hadoop.hive.ql.exec.vector.groupby.hash.singlekey.duplicatereduction.VectorGrouoByHashSingleKeyDuplicateReductionOperatorBase; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.plan.OperatorDesc; +import org.apache.hadoop.hive.ql.plan.VectorDesc; + +/* + * A single long key hash table optimized for Duplicate Reduction Native Vectorized GroupBy. + */ +public abstract class VectorGroupByHashLongKeyDuplicateReductionTable + extends VectorGrouoByHashSingleKeyDuplicateReductionOperatorBase { + + private static final long serialVersionUID = 1L; + + protected int keyColumnNum; + + // The above members are initialized by the constructor and must not be + // transient. + //--------------------------------------------------------------------------- + + protected transient boolean haveZeroKey; + + //--------------------------------------------------------------------------- + // Pass-thru constructors. + // + + public VectorGroupByHashLongKeyDuplicateReductionTable() { + super(); + + keyColumnNum = -1; + } + + public VectorGroupByHashLongKeyDuplicateReductionTable(CompilationOpContext ctx, OperatorDesc conf, + VectorizationContext vContext, VectorDesc vectorDesc) throws HiveException { + super(ctx, conf, vContext, vectorDesc); + + keyColumnNum = groupByKeyExpressions[0].getOutputColumnNum(); + } + + @Override + protected void initializeOp(Configuration hconf) throws HiveException { + super.initializeOp(hconf); + + haveZeroKey = false; + + allocateBucketArray(LONG_DUPLICATE_REDUCTION_ENTRY_SIZE); + } + + // UNDONE: Use other method. + @Override + public void outputGroupByAndClearAll() throws HiveException { + super.outputGroupByAndClearAll(); + + haveZeroKey = false; + } + + //------------------------------------------------------------------------------------------------ + + protected static final int LONG_DUPLICATE_REDUCTION_ENTRY_SIZE = 1; + + public void findOrCreateLongDuplicateReductionKey(long key, long hashCode) + throws HiveException, IOException { + + int intHashCode = (int) hashCode; + int slot = (intHashCode & logicalHashBucketMask); + long probeSlot = slot; + int i = 0; + while (true) { + if (slotMultiples[slot] == 0) { + break; + } + if (key == slotMultiples[slot]) { + // Found it! A duplicate has now been eliminated. + return; + } + // Some other key (collision) - keep probing. + probeSlot += (++i); + if (largestNumberOfSteps < i) { + largestNumberOfSteps = i; + /* + if (checkLargestNumberOfStepsAboveThresold()) { + System.out.println( + "*DEBUG* New largestNumberOfSteps " + largestNumberOfSteps + + " logicalHashBucketCount " + logicalHashBucketCount + + " keyCount " + keyCount + + " hashCode 0x" + Integer.toHexString(intHashCode)); + } + */ + } + slot = (int)(probeSlot & logicalHashBucketMask); + } + + // Create first-time key.
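One subtlety at this step: an empty slot and the long key 0 are both encoded as 0 in slotMultiples, so the key value 0 must never be inserted here; that is what the haveZeroKey flag above is for, and doOutputLongKeys later emits the zero key separately. A sketch of the presumed caller-side routing (the real main loop lives in a subclass outside this hunk, so this is an assumption):

    // Hypothetical caller logic; not part of this patch hunk.
    if (key == 0) {
      haveZeroKey = true;  // remember the zero key out of band
    } else {
      findOrCreateLongDuplicateReductionKey(key, hashCode);
    }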
+ slotMultiples[slot] = key; + keyCount++; + } + + private int countKeyIndex; + + protected int initLongDuplicateReductionKeyIterator() { + countKeyIndex = 0; + return keyCount; + } + + // Find next key and return it. + protected long getNext() { + while (true) { + long key = slotMultiples[countKeyIndex++]; + if (key != 0) { + return key; + } + } + } + + protected void doOutputLongKeys( + LongColumnVector keyColumnVector) throws HiveException { + + long[] keyVector = keyColumnVector.vector; + + if (haveZeroKey) { + + // Zero key to deal with. + + // Is the outputBatch already full? + if (outputBatch.size == outputBatch.DEFAULT_SIZE) { + forwardOutputBatch(outputBatch); + } + + keyVector[outputBatch.size++] = 0; + } + + // Use the iterator to race down the slot table array and pull long key and count out of each + // slot entry and store in the output batch. + int keyCount = initLongDuplicateReductionKeyIterator(); + while (keyCount > 0) { + if (outputBatch.size == outputBatch.DEFAULT_SIZE) { + forwardOutputBatch(outputBatch); + } + + int startBatchIndex = outputBatch.size; + int count = Math.min(keyCount, outputBatch.DEFAULT_SIZE - startBatchIndex); + + for (int i = startBatchIndex; i < startBatchIndex + count; i++) { + keyVector[i] = getNext(); + } + outputBatch.size += count; + keyCount -= count; + } + } +} \ No newline at end of file diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/groupby/hash/longkey/singlecount/VectorGroupByHashLongKeySingleCountTable.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/groupby/hash/longkey/singlecount/VectorGroupByHashLongKeySingleCountTable.java new file mode 100644 index 0000000..2e7a8f4 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/groupby/hash/longkey/singlecount/VectorGroupByHashLongKeySingleCountTable.java @@ -0,0 +1,312 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.ql.exec.vector.groupby.hash.longkey.singlecount; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collection; +import java.util.concurrent.Future; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.ql.CompilationOpContext; +import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext; +import org.apache.hadoop.hive.ql.exec.vector.VectorizedBatchUtil; +import org.apache.hadoop.hive.ql.exec.vector.groupby.hash.VectorGroupByHashOperatorBase; +import org.apache.hadoop.hive.ql.exec.vector.groupby.hash.singlekey.singlecount.VectorGrouoByHashSingleKeySingleCountOperatorBase; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.plan.OperatorDesc; +import org.apache.hadoop.hive.ql.plan.VectorDesc; + +/* + * Single long key hash tables optimized for: + * 1) COUNT(*) Native Vectorized GroupBy. + * 2) COUNT(key-column) and COUNT(non-key-column) Native Vectorized GroupBy + */ +public abstract class VectorGroupByHashLongKeySingleCountTable + extends VectorGrouoByHashSingleKeySingleCountOperatorBase { + + private static final long serialVersionUID = 1L; + + protected int keyColumnNum; + + // The above members are initialized by the constructor and must not be + // transient. + //--------------------------------------------------------------------------- + + //--------------------------------------------------------------------------- + // Pass-thru constructors. + // + + public VectorGroupByHashLongKeySingleCountTable() { + super(); + + keyColumnNum = -1; + } + + public VectorGroupByHashLongKeySingleCountTable(CompilationOpContext ctx, OperatorDesc conf, + VectorizationContext vContext, VectorDesc vectorDesc) throws HiveException { + super(ctx, conf, vContext, vectorDesc); + + keyColumnNum = groupByKeyExpressions[0].getOutputColumnNum(); + } + + @Override + protected void initializeOp(Configuration hconf) throws HiveException { + super.initializeOp(hconf); + } + + //------------------------------------------------------------------------------------------------ + + protected static int LONG_NON_ZERO_COUNT_ENTRY_SIZE = 2; + + public void findOrCreateLongNonZeroCountKey(long key, long hashCode, int count) + throws HiveException, IOException { + + int intHashCode = (int) hashCode; + int slot = (intHashCode & logicalHashBucketMask); + long probeSlot = slot; + int i = 0; + boolean isNewKey; + int pairIndex = 0; + while (true) { + pairIndex = 2 * slot; + if (slotMultiples[pairIndex + 1] == 0) { + isNewKey = true; + break; + } + if (key == slotMultiples[pairIndex]) { + isNewKey = false; + break; + } + // Some other key (collision) - keep probing. 
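Note the emptiness test above: it checks the count half of the pair (pairIndex + 1) rather than the key. Every live entry in this non-zero-count variant has count >= 1, so a zero count reliably marks an empty slot, and the key half may hold any long value, including 0. Restating the invariant as a small sketch:

    // Invariant sketch (illustrative): pair = { key, count }; count == 0 <=> slot is empty.
    int pairIndex = 2 * slot;
    boolean isEmpty = (slotMultiples[pairIndex + 1] == 0);
    boolean matches = !isEmpty && (slotMultiples[pairIndex] == key);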
+ probeSlot += (++i); + if (largestNumberOfSteps < i) { + largestNumberOfSteps = i; + /* + if (checkLargestNumberOfStepsAboveThresold()) { + System.out.println( + "*DEBUG* New largestNumberOfSteps " + largestNumberOfSteps + + " logicalHashBucketCount " + logicalHashBucketCount + + " keyCount " + keyCount + + " hashCode 0x" + Integer.toHexString(intHashCode)); + } + */ + } + slot = (int)(probeSlot & logicalHashBucketMask); + } + + if (isNewKey) { + slotMultiples[pairIndex] = key; + keyCount++; + slotMultiples[pairIndex + 1] = count; + } else { + slotMultiples[pairIndex + 1] += count; + } + } + + private int nonZeroCountPairIndex; + private long currentNonZeroCount; + + protected int initLongNonZeroCountKeyIterator() { + nonZeroCountPairIndex = 0; + currentNonZeroCount = 0; + return keyCount; + } + + // Find next key and return it. + protected long getNextNonZeroCountKey() { + while (true) { + long count = slotMultiples[nonZeroCountPairIndex + 1]; + if (count > 0) { + currentNonZeroCount = count; + long key = slotMultiples[nonZeroCountPairIndex]; + nonZeroCountPairIndex += 2; + return key; + } + nonZeroCountPairIndex += 2; + } + } + + public long getLongNonZeroCount() { + return currentNonZeroCount; + } + + //------------------------------------------------------------------------------------------------ + + /** + * Flush all of the key and count pairs of the one long key non-zero count hash table to the + * output. + */ + protected void outputLongNonZeroKeyAndCountPairs( + LongColumnVector keyColumnVector, + LongColumnVector countColumnVector) throws HiveException { + + boolean[] keyIsNull = keyColumnVector.isNull; + long[] keyVector = keyColumnVector.vector; + boolean[] countIsNull = countColumnVector.isNull; + long[] countVector = countColumnVector.vector; + + // Use the iterator to race down the slot table array and pull long key and count out of each + // slot entry and store in the output batch. + int keyCount = initLongNonZeroCountKeyIterator(); + while (keyCount > 0) { + if (outputBatch.size == outputBatch.DEFAULT_SIZE) { + forwardOutputBatch(outputBatch); + } + + int startBatchIndex = outputBatch.size; + int count = Math.min(keyCount, outputBatch.DEFAULT_SIZE - startBatchIndex); + + for (int i = startBatchIndex; i < startBatchIndex + count; i++) { + keyVector[i] = getNextNonZeroCountKey(); + countVector[i] = getLongNonZeroCount(); + } + outputBatch.size += count; + keyCount -= count; + } + } + + //------------------------------------------------------------------------------------------------ + + private static long LONG_KEY_COUNT_KEY_ZERO_HAS_VALUE_MASK = 1L << 63; + + protected static int LONG_ZERO_COUNT_ENTRY_SIZE = 2; + + public void findOrCreateLongZeroCountKey(long key, long hashCode, int count) + throws HiveException, IOException { + + int intHashCode = (int) hashCode; + int slot = (intHashCode & logicalHashBucketMask); + long probeSlot = slot; + int i = 0; + boolean isNewKey; + int pairIndex = 0; + while (true) { + pairIndex = 2 * slot; + if (slotMultiples[pairIndex + 1] == 0) { + isNewKey = true; + break; + } + if (key == slotMultiples[pairIndex]) { + isNewKey = false; + break; + } + // Some other key (collision) - keep probing. 
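This zero-count variant cannot use a plain zero count to mean "empty", because a key's count may legitimately be 0 (for example, COUNT of a column whose values are all NULL for that key). The code below therefore stores a present-but-zero count as the sentinel 1L << 63 and decodes it back to 0 on output. A sketch of the encode/decode that mirrors the branches below:

    // Illustrative encode/decode for a count that may legitimately be zero.
    static final long ZERO_SENTINEL = 1L << 63;

    static long encodeCount(long count) {
      return (count == 0 ? ZERO_SENTINEL : count);
    }

    static long decodeCount(long stored) {
      return (stored == ZERO_SENTINEL ? 0 : stored);
    }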
+ probeSlot += (++i); + if (largestNumberOfSteps < i) { + largestNumberOfSteps = i; + /* + if (checkLargestNumberOfStepsAboveThresold()) { + System.out.println( + "*DEBUG* New largestNumberOfSteps " + largestNumberOfSteps + + " logicalHashBucketCount " + logicalHashBucketCount + + " keyCount " + keyCount + + " hashCode 0x" + Integer.toHexString(intHashCode)); + } + */ + } + slot = (int)(probeSlot & logicalHashBucketMask); + } + + if (isNewKey) { + slotMultiples[pairIndex] = key; + keyCount++; + if (count == 0) { + slotMultiples[pairIndex + 1] = LONG_KEY_COUNT_KEY_ZERO_HAS_VALUE_MASK; + } else { + slotMultiples[pairIndex + 1] = count; + } + } else if (count > 0) { + + // Only update the count when we are leaving 0. + if (slotMultiples[pairIndex + 1] == LONG_KEY_COUNT_KEY_ZERO_HAS_VALUE_MASK) { + slotMultiples[pairIndex + 1] = count; + } else { + slotMultiples[pairIndex + 1] += count; + } + } + } + + private int countKeyPairIndex; + private long currentCountKeyCount; + + protected int initLongZeroCountKeyIterator() { + countKeyPairIndex = 0; + currentCountKeyCount = 0; + return keyCount; + } + + // Find next key and return it. + protected long getNextZeroCountKey() { + while (true) { + long count = slotMultiples[countKeyPairIndex + 1]; + if (count != 0) { + if (count == LONG_KEY_COUNT_KEY_ZERO_HAS_VALUE_MASK) { + currentCountKeyCount = 0; + } else { + currentCountKeyCount = count; + } + long key = slotMultiples[countKeyPairIndex]; + countKeyPairIndex += 2; + return key; + } + countKeyPairIndex += 2; + } + } + + public long getCount() { + return currentCountKeyCount; + } + + //------------------------------------------------------------------------------------------------ + + /** + * Flush all of the key and count pairs of the one long key zero count hash table to the + * output. + */ + protected void outputLongZeroCountKeyAndCountPairs( + LongColumnVector keyColumnVector, + LongColumnVector countColumnVector) throws HiveException { + + boolean[] keyIsNull = keyColumnVector.isNull; + long[] keyVector = keyColumnVector.vector; + boolean[] countIsNull = countColumnVector.isNull; + long[] countVector = countColumnVector.vector; + + // Use the iterator to race down the slot table array and pull long key and count out of each + // slot entry and store in the output batch.
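The copy loop that follows writes Math.min(keyCount, DEFAULT_SIZE - startBatchIndex) entries per pass and forwards the batch whenever it fills. A worked example of the chunking arithmetic, assuming the usual batch size of 1,024 and 2,500 keys to flush (two full batches are forwarded and 452 rows remain for the final flush):

    // Illustrative chunking arithmetic only; no Hive classes involved.
    int remaining = 2500;
    int batchSize = 0;
    final int DEFAULT_SIZE = 1024;
    while (remaining > 0) {
      int count = Math.min(remaining, DEFAULT_SIZE - batchSize);
      batchSize += count;
      remaining -= count;
      if (batchSize == DEFAULT_SIZE) {
        batchSize = 0;  // forwardOutputBatch(outputBatch) would happen here
      }
    }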
+ int keyCount = initLongZeroCountKeyIterator(); + while (keyCount > 0) { + if (outputBatch.size == outputBatch.DEFAULT_SIZE) { + forwardOutputBatch(outputBatch); + } + + int startBatchIndex = outputBatch.size; + int count = Math.min(keyCount, outputBatch.DEFAULT_SIZE - startBatchIndex); + + for (int batchIndex = startBatchIndex; batchIndex < startBatchIndex + count; batchIndex++) { + keyIsNull[batchIndex] = false; + keyVector[batchIndex] = getNextZeroCountKey(); + countIsNull[batchIndex] = false; + countVector[batchIndex] = getCount(); + } + outputBatch.size += count; + keyCount -= count; + } + } +} \ No newline at end of file diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/groupby/hash/serializekey/duplicatereduction/VectorGroupByHashSerializeKeyDuplicateReductionTable.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/groupby/hash/serializekey/duplicatereduction/VectorGroupByHashSerializeKeyDuplicateReductionTable.java new file mode 100644 index 0000000..d6f62cd --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/groupby/hash/serializekey/duplicatereduction/VectorGroupByHashSerializeKeyDuplicateReductionTable.java @@ -0,0 +1,128 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.exec.vector.groupby.hash.serializekey.duplicatereduction; + +import java.io.IOException; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.ql.CompilationOpContext; +import org.apache.hadoop.hive.ql.exec.vector.ColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.VectorDeserializeRow; +import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext; +import org.apache.hadoop.hive.ql.exec.vector.groupby.hash.byteskey.duplicatereduction.VectorGroupByHashBytesKeyDuplicateReductionTable; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.plan.OperatorDesc; +import org.apache.hadoop.hive.ql.plan.VectorDesc; + +import com.google.common.base.Preconditions; + +import org.apache.hadoop.hive.ql.exec.vector.VectorizedBatchUtil; +import org.apache.hadoop.hive.serde2.binarysortable.fast.BinarySortableDeserializeRead; +import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; + +/* + * A serialize key hash table optimized for Duplicate Reduction Native Vectorized GroupBy. + */ +public abstract class VectorGroupByHashSerializeKeyDuplicateReductionTable + extends VectorGroupByHashBytesKeyDuplicateReductionTable { + + private static final long serialVersionUID = 1L; + + protected int keyColumnNum; + + // The above members are initialized by the constructor and must not be + // transient.
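The design idea in this subclass: key types that do not fit a single long reuse the bytes-key hash table by serializing each key into a canonical byte image (the BinarySortable format), so that equal keys always produce identical bytes. A minimal illustration of the canonical-bytes idea using a plain ByteBuffer; the real patch uses Hive's BinarySortable serialization, not this:

    // Illustrative only -- a fixed-width, fixed-order encoding so that
    // equal keys yield identical byte images suitable for hashing.
    import java.nio.ByteBuffer;

    class CanonicalKeySketch {
      static byte[] canonicalBytes(long keyPart1, int keyPart2) {
        ByteBuffer buf = ByteBuffer.allocate(Long.BYTES + Integer.BYTES);
        buf.putLong(keyPart1);
        buf.putInt(keyPart2);
        return buf.array();
      }
    }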
+ //--------------------------------------------------------------------------- + + private transient VectorDeserializeRow keyVectorDeserializeRow; + + //--------------------------------------------------------------------------- + // Pass-thru constructors. + // + + public VectorGroupByHashSerializeKeyDuplicateReductionTable() { + super(); + + keyColumnNum = -1; + } + + public VectorGroupByHashSerializeKeyDuplicateReductionTable(CompilationOpContext ctx, OperatorDesc conf, + VectorizationContext vContext, VectorDesc vectorDesc) throws HiveException { + super(ctx, conf, vContext, vectorDesc); + + keyColumnNum = groupByKeyExpressions[0].getOutputColumnNum(); + } + + @Override + protected void initializeOp(Configuration hconf) throws HiveException { + super.initializeOp(hconf); + + TypeInfo[] typeInfos = new TypeInfo[] { groupByKeyExpressions[0].getOutputTypeInfo() }; + keyVectorDeserializeRow = + new VectorDeserializeRow( + new BinarySortableDeserializeRead( + typeInfos, + /* useExternalBuffer */ true)); + // Single key is output column 0. + keyVectorDeserializeRow.init(new int[] { 0 }); + } + + //------------------------------------------------------------------------------------------------ + + /** + * Flush all of the keys of the serialize key hash table to the output. + */ + protected void doOutputSerializeKeys( + ColumnVector keyColumnVector) throws HiveException { + + boolean[] keyIsNull = keyColumnVector.isNull; + + // Use the iterator to race down the slot table array and get the bytes key out of + // each slot entry and store in the output batch. + int keyCount = initBytesKeyIterator(); + while (keyCount > 0) { + if (outputBatch.size == outputBatch.DEFAULT_SIZE) { + forwardOutputBatch(outputBatch); + } + + int startBatchIndex = outputBatch.size; + int count = Math.min(keyCount, outputBatch.DEFAULT_SIZE - startBatchIndex); + + for (int batchIndex = startBatchIndex; batchIndex < startBatchIndex + count; batchIndex++) { + readNext(); + keyIsNull[batchIndex] = false; + keyVectorDeserializeRow.setBytes( + getKeyBytes(), getKeyBytesOffset(), getKeyBytesLength()); + + try { + // Our hash tables are immutable. We can safely do by reference STRING, CHAR/VARCHAR, etc. + keyVectorDeserializeRow.deserializeByRef(outputBatch, batchIndex); + } catch (Exception e) { + throw new HiveException( + "\nDeserializeRead detail: " + + keyVectorDeserializeRow.getDetailedReadPositionString(), + e); + } + } + outputBatch.size += count; + keyCount -= count; + } + } +} \ No newline at end of file diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/groupby/hash/serializekey/singlecount/VectorGroupByHashSerializeKeySingleCountTable.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/groupby/hash/serializekey/singlecount/VectorGroupByHashSerializeKeySingleCountTable.java new file mode 100644 index 0000000..70a1e3b --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/groupby/hash/serializekey/singlecount/VectorGroupByHashSerializeKeySingleCountTable.java @@ -0,0 +1,134 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License.
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.exec.vector.groupby.hash.serializekey.singlecount; + +import java.io.IOException; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.ql.CompilationOpContext; +import org.apache.hadoop.hive.ql.exec.vector.ColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.VectorDeserializeRow; +import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext; +import org.apache.hadoop.hive.ql.exec.vector.groupby.hash.VectorGroupByHashOperatorBase; +import org.apache.hadoop.hive.ql.exec.vector.groupby.hash.byteskey.singlecount.VectorGroupByHashBytesKeySingleCountTable; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.plan.OperatorDesc; +import org.apache.hadoop.hive.ql.plan.VectorDesc; + +import com.google.common.base.Preconditions; + +import org.apache.hadoop.hive.ql.exec.vector.VectorizedBatchUtil; +import org.apache.hadoop.hive.serde2.binarysortable.fast.BinarySortableDeserializeRead; +import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; + +/* + * A serialize key hash table optimized for COUNT(key-column) and COUNT(non-key-column) + * Native Vectorized GroupBy. + */ +public abstract class VectorGroupByHashSerializeKeySingleCountTable + extends VectorGroupByHashBytesKeySingleCountTable { + + private static final long serialVersionUID = 1L; + + protected int keyColumnNum; + + // The above members are initialized by the constructor and must not be + // transient. + //--------------------------------------------------------------------------- + + private transient VectorDeserializeRow keyVectorDeserializeRow; + + //--------------------------------------------------------------------------- + // Pass-thru constructors. + // + + public VectorGroupByHashSerializeKeySingleCountTable() { + super(); + + keyColumnNum = -1; + } + + public VectorGroupByHashSerializeKeySingleCountTable(CompilationOpContext ctx, OperatorDesc conf, + VectorizationContext vContext, VectorDesc vectorDesc) throws HiveException { + super(ctx, conf, vContext, vectorDesc); + + keyColumnNum = groupByKeyExpressions[0].getOutputColumnNum(); + } + + @Override + protected void initializeOp(Configuration hconf) throws HiveException { + super.initializeOp(hconf); + + TypeInfo[] typeInfos = new TypeInfo[] { groupByKeyExpressions[0].getOutputTypeInfo() }; + keyVectorDeserializeRow = + new VectorDeserializeRow( + new BinarySortableDeserializeRead( + typeInfos, + /* useExternalBuffer */ true)); + // Single key is output column 0. + keyVectorDeserializeRow.init(new int[] { 0 }); + } + + //------------------------------------------------------------------------------------------------ + + /** + * Flush all of the key and count pairs of the serialize key hash table to the output.
+ */ + protected void doOutputSerializeKeyAndCountPairs( + ColumnVector keyColumnVector, + LongColumnVector countColumnVector) throws HiveException { + + boolean[] keyIsNull = keyColumnVector.isNull; + boolean[] countIsNull = countColumnVector.isNull; + long[] countVector = countColumnVector.vector; + + // Use the iterator to race down the slot table array and get the bytes key and count out of + // each slot entry and store in the output batch. + int keyCount = initBytesKeyIterator(); + while (keyCount > 0) { + if (outputBatch.size == outputBatch.DEFAULT_SIZE) { + forwardOutputBatch(outputBatch); + } + + int startBatchIndex = outputBatch.size; + int count = Math.min(keyCount, outputBatch.DEFAULT_SIZE - startBatchIndex); + + for (int batchIndex = startBatchIndex; batchIndex < startBatchIndex + count; batchIndex++) { + readNext(); + keyIsNull[batchIndex] = false; + keyVectorDeserializeRow.setBytes( + getKeyBytes(), getKeyBytesOffset(), getKeyBytesLength()); + + try { + // Our hash tables are immutable. We can safely do by reference STRING, CHAR/VARCHAR, etc. + keyVectorDeserializeRow.deserializeByRef(outputBatch, batchIndex); + } catch (Exception e) { + throw new HiveException( + "\nDeserializeRead detail: " + + keyVectorDeserializeRow.getDetailedReadPositionString(), + e); + } + countIsNull[batchIndex] = false; + countVector[batchIndex] = getCount(); + } + outputBatch.size += count; + keyCount -= count; + } + } +} \ No newline at end of file diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/groupby/hash/singlekey/duplicatereduction/VectorGrouoByHashSingleKeyDuplicateReductionOperatorBase.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/groupby/hash/singlekey/duplicatereduction/VectorGrouoByHashSingleKeyDuplicateReductionOperatorBase.java new file mode 100644 index 0000000..0ada445 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/groupby/hash/singlekey/duplicatereduction/VectorGrouoByHashSingleKeyDuplicateReductionOperatorBase.java @@ -0,0 +1,113 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.ql.exec.vector.groupby.hash.singlekey.duplicatereduction; + +import java.io.IOException; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.ql.CompilationOpContext; +import org.apache.hadoop.hive.ql.exec.vector.ColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext; +import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.plan.OperatorDesc; +import org.apache.hadoop.hive.ql.plan.VectorDesc; + +import com.google.common.base.Preconditions; + +import org.apache.hadoop.hive.ql.exec.vector.VectorizedBatchUtil; +import org.apache.hadoop.hive.ql.exec.vector.groupby.hash.VectorGroupByHashOperatorBase; + +/* + * Common single key duplicate reduction operator. + */ +public abstract class VectorGrouoByHashSingleKeyDuplicateReductionOperatorBase + extends VectorGroupByHashOperatorBase { + + private static final long serialVersionUID = 1L; + + // The above members are initialized by the constructor and must not be + // transient. + //--------------------------------------------------------------------------- + + protected transient boolean haveNullKey; + + //--------------------------------------------------------------------------- + // Pass-thru constructors. + // + + public VectorGrouoByHashSingleKeyDuplicateReductionOperatorBase() { + super(); + } + + public VectorGrouoByHashSingleKeyDuplicateReductionOperatorBase(CompilationOpContext ctx, + OperatorDesc conf, VectorizationContext vContext, VectorDesc vectorDesc) + throws HiveException { + super(ctx, conf, vContext, vectorDesc); + } + + @Override + protected void initializeOp(Configuration hconf) throws HiveException { + super.initializeOp(hconf); + + haveNullKey = false; + } + + @Override + public void outputGroupByAndClearAll() throws HiveException { + super.outputGroupByAndClearAll(); + + // No storage to clear! + + haveNullKey = false; + } + + /** + * Flush all of the keys of the one single key hash table to the + * output. + */ + @Override + protected void outputGroupBy() throws HiveException { + + // Keys come first in the output. + + ColumnVector keyColumnVector = outputBatch.cols[0]; + + if (haveNullKey) { + + // NULL entry to deal with. + + // Is the outputBatch already full? + if (outputBatch.size == outputBatch.DEFAULT_SIZE) { + forwardOutputBatch(outputBatch); + } + + final int nullBatchIndex = outputBatch.size; + keyColumnVector.isNull[nullBatchIndex] = true; + keyColumnVector.noNulls = false; + outputBatch.size++; + } + + outputSingleKeys(keyColumnVector); + } + + protected abstract void outputSingleKeys( + ColumnVector keyColumnVector) throws HiveException; +} \ No newline at end of file diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/groupby/hash/singlekey/singlecount/VectorGrouoByHashSingleKeySingleCountOperatorBase.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/groupby/hash/singlekey/singlecount/VectorGrouoByHashSingleKeySingleCountOperatorBase.java new file mode 100644 index 0000000..87fde47 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/groupby/hash/singlekey/singlecount/VectorGrouoByHashSingleKeySingleCountOperatorBase.java @@ -0,0 +1,123 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements.
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.exec.vector.groupby.hash.singlekey.singlecount; + +import java.io.IOException; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.ql.CompilationOpContext; +import org.apache.hadoop.hive.ql.exec.vector.ColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext; +import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.plan.OperatorDesc; +import org.apache.hadoop.hive.ql.plan.VectorDesc; + +import com.google.common.base.Preconditions; + +import org.apache.hadoop.hive.ql.exec.vector.VectorizedBatchUtil; +import org.apache.hadoop.hive.ql.exec.vector.groupby.hash.VectorGroupByHashOperatorBase; + +/* + * Common single key count operator. + */ +public abstract class VectorGrouoByHashSingleKeySingleCountOperatorBase + extends VectorGroupByHashOperatorBase { + + private static final long serialVersionUID = 1L; + + // The above members are initialized by the constructor and must not be + // transient. + //--------------------------------------------------------------------------- + + protected transient boolean haveNullKey; + + protected transient long nullKeyCount; + + //--------------------------------------------------------------------------- + // Pass-thru constructors. + // + + public VectorGrouoByHashSingleKeySingleCountOperatorBase() { + super(); + } + + public VectorGrouoByHashSingleKeySingleCountOperatorBase(CompilationOpContext ctx, OperatorDesc conf, + VectorizationContext vContext, VectorDesc vectorDesc) throws HiveException { + super(ctx, conf, vContext, vectorDesc); + } + + @Override + protected void initializeOp(Configuration hconf) throws HiveException { + super.initializeOp(hconf); + + haveNullKey = false; + nullKeyCount = 0; + } + + @Override + public void outputGroupByAndClearAll() throws HiveException { + super.outputGroupByAndClearAll(); + + // No storage to clear! + + haveNullKey = false; + nullKeyCount = 0; + } + + /** + * Flush all of the key and count pairs of the one single key hash table to the + * output. + */ + @Override + protected void outputGroupBy() throws HiveException { + + // Keys come first in the output. + + ColumnVector keyColumnVector = outputBatch.cols[0]; + + LongColumnVector countKeyColumnVector = (LongColumnVector) outputBatch.cols[1]; + + if (haveNullKey) { + + // NULL entry to deal with. + + // Is the outputBatch already full?
+ if (outputBatch.size == outputBatch.DEFAULT_SIZE) { + forwardOutputBatch(outputBatch); + } + + final int nullBatchIndex = outputBatch.size; + keyColumnVector.isNull[nullBatchIndex] = true; + keyColumnVector.noNulls = false; + + countKeyColumnVector.isNull[nullBatchIndex] = false; + countKeyColumnVector.vector[nullBatchIndex] = nullKeyCount; + + outputBatch.size++; + } + + outputSingleKeyAndCountPairs(keyColumnVector, countKeyColumnVector); + } + + protected abstract void outputSingleKeyAndCountPairs( + ColumnVector keyColumnVector, + LongColumnVector countColumnVector) throws HiveException; +} \ No newline at end of file diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/groupby/hash/stringkey/duplicatereduction/VectorGroupByHashStringKeyDuplicateReductionTable.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/groupby/hash/stringkey/duplicatereduction/VectorGroupByHashStringKeyDuplicateReductionTable.java new file mode 100644 index 0000000..6a113c8 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/groupby/hash/stringkey/duplicatereduction/VectorGroupByHashStringKeyDuplicateReductionTable.java @@ -0,0 +1,106 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.exec.vector.groupby.hash.stringkey.duplicatereduction; + +import java.io.IOException; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.ql.CompilationOpContext; +import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext; +import org.apache.hadoop.hive.ql.exec.vector.groupby.hash.byteskey.duplicatereduction.VectorGroupByHashBytesKeyDuplicateReductionTable; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.plan.OperatorDesc; +import org.apache.hadoop.hive.ql.plan.VectorDesc; + +import com.google.common.base.Preconditions; + +import org.apache.hadoop.hive.ql.exec.vector.VectorizedBatchUtil; + +/* + * A single string key hash table optimized for Native Vectorized GroupBy. + */ +public abstract class VectorGroupByHashStringKeyDuplicateReductionTable + extends VectorGroupByHashBytesKeyDuplicateReductionTable { + + private static final long serialVersionUID = 1L; + + protected int keyColumnNum; + + // The above members are initialized by the constructor and must not be + // transient. + //--------------------------------------------------------------------------- + + //--------------------------------------------------------------------------- + // Pass-thru constructors.
+ // + + public VectorGroupByHashStringKeyDuplicateReductionTable() { + super(); + + keyColumnNum = -1; + } + + public VectorGroupByHashStringKeyDuplicateReductionTable(CompilationOpContext ctx, OperatorDesc conf, + VectorizationContext vContext, VectorDesc vectorDesc) throws HiveException { + super(ctx, conf, vContext, vectorDesc); + + keyColumnNum = groupByKeyExpressions[0].getOutputColumnNum(); + } + + @Override + protected void initializeOp(Configuration hconf) throws HiveException { + super.initializeOp(hconf); + } + + //------------------------------------------------------------------------------------------------ + + /** + * Flush all of the keys of the one string key duplicate reduction hash table to the + * output. + */ + protected void doOutputStringKeys( + BytesColumnVector keyColumnVector) throws HiveException { + + boolean[] keyIsNull = keyColumnVector.isNull; + + // Use the iterator to race down the slot table array and get the bytes key out of + // each slot entry and store it in the output batch. + int keyCount = initBytesKeyIterator(); + while (keyCount > 0) { + if (outputBatch.size == outputBatch.DEFAULT_SIZE) { + forwardOutputBatch(outputBatch); + } + + int startBatchIndex = outputBatch.size; + int count = Math.min(keyCount, outputBatch.DEFAULT_SIZE - startBatchIndex); + + for (int batchIndex = startBatchIndex; batchIndex < startBatchIndex + count; batchIndex++) { + readNext(); + keyIsNull[batchIndex] = false; + keyColumnVector.setRef( + batchIndex, + getKeyBytes(), getKeyBytesOffset(), getKeyBytesLength()); + } + outputBatch.size += count; + keyCount -= count; + } + } +} \ No newline at end of file diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/groupby/hash/stringkey/singlecount/VectorGroupByHashStringKeySingleCountTable.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/groupby/hash/stringkey/singlecount/VectorGroupByHashStringKeySingleCountTable.java new file mode 100644 index 0000000..5d8eab3 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/groupby/hash/stringkey/singlecount/VectorGroupByHashStringKeySingleCountTable.java @@ -0,0 +1,112 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ + +package org.apache.hadoop.hive.ql.exec.vector.groupby.hash.stringkey.singlecount; + +import java.io.IOException; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.ql.CompilationOpContext; +import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext; +import org.apache.hadoop.hive.ql.exec.vector.groupby.hash.VectorGroupByHashOperatorBase; +import org.apache.hadoop.hive.ql.exec.vector.groupby.hash.byteskey.singlecount.VectorGroupByHashBytesKeySingleCountTable; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.plan.OperatorDesc; +import org.apache.hadoop.hive.ql.plan.VectorDesc; + +import com.google.common.base.Preconditions; + +import org.apache.hadoop.hive.ql.exec.vector.VectorizedBatchUtil; + +/* + * A single string key hash table optimized for Native Vectorized GroupBy. + */ +public abstract class VectorGroupByHashStringKeySingleCountTable + extends VectorGroupByHashBytesKeySingleCountTable { + + private static final long serialVersionUID = 1L; + + protected int keyColumnNum; + + // The above members are initialized by the constructor and must not be + // transient. + //--------------------------------------------------------------------------- + + //--------------------------------------------------------------------------- + // Pass-thru constructors. + // + + public VectorGroupByHashStringKeySingleCountTable() { + super(); + + keyColumnNum = -1; + } + + public VectorGroupByHashStringKeySingleCountTable(CompilationOpContext ctx, OperatorDesc conf, + VectorizationContext vContext, VectorDesc vectorDesc) throws HiveException { + super(ctx, conf, vContext, vectorDesc); + + keyColumnNum = groupByKeyExpressions[0].getOutputColumnNum(); + } + + @Override + protected void initializeOp(Configuration hconf) throws HiveException { + super.initializeOp(hconf); + } + + //------------------------------------------------------------------------------------------------ + + /** + * Flush all of the key and count pairs of the one string key single count hash table to the + * output. + */ + protected void doOutputStringKeyAndCountPairs( + BytesColumnVector keyColumnVector, + LongColumnVector countColumnVector) throws HiveException { + + boolean[] keyIsNull = keyColumnVector.isNull; + boolean[] countIsNull = countColumnVector.isNull; + long[] countVector = countColumnVector.vector; + + // Use the iterator to race down the slot table array and get the bytes key and count out of + // each slot entry and store in the output batch.
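+ // The key bytes are handed to the output batch by reference (keyColumnVector.setRef below) + // rather than copied; that is safe here because these hash tables are immutable while they + // are being drained, as noted in the serialize key variants.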
+ int keyCount = initBytesKeyIterator(); + while (keyCount > 0) { + if (outputBatch.size == outputBatch.DEFAULT_SIZE) { + forwardOutputBatch(outputBatch); + } + + int startBatchIndex = outputBatch.size; + int count = Math.min(keyCount, outputBatch.DEFAULT_SIZE - startBatchIndex); + + for (int batchIndex = startBatchIndex; batchIndex < startBatchIndex + count; batchIndex++) { + readNext(); + keyIsNull[batchIndex] = false; + keyColumnVector.setRef( + batchIndex, + getKeyBytes(), getKeyBytesOffset(), getKeyBytesLength()); + countIsNull[batchIndex] = false; + countVector[batchIndex] = getCount(); + } + outputBatch.size += count; + keyCount -= count; + } + } +} \ No newline at end of file diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastKeyStore.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/keystore/VectorKeyStore.java similarity index 82% rename from ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastKeyStore.java rename to ql/src/java/org/apache/hadoop/hive/ql/exec/vector/keystore/VectorKeyStore.java index b6684e0..e0af5b9 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastKeyStore.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/keystore/VectorKeyStore.java @@ -16,18 +16,15 @@ * limitations under the License. */ -package org.apache.hadoop.hive.ql.exec.vector.mapjoin.fast; +package org.apache.hadoop.hive.ql.exec.vector.keystore; import org.apache.hadoop.hive.common.MemoryEstimate; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.serde2.WriteBuffers; +import org.apache.hadoop.hive.serde2.WriteBuffers.ByteSegmentRef; // Optimized for sequential key lookup. -public class VectorMapJoinFastKeyStore implements MemoryEstimate { - - private static final Logger LOG = LoggerFactory.getLogger(VectorMapJoinFastKeyStore.class.getName()); +public class VectorKeyStore implements MemoryEstimate { private WriteBuffers writeBuffers; @@ -124,13 +121,11 @@ public boolean unsafeEqualKey(long keyRefWord, byte[] keyBytes, int keyStart, in public boolean equalKey(long keyRefWord, byte[] keyBytes, int keyStart, int keyLength, WriteBuffers.Position readPos) { - int storedKeyLengthLength = + int storedKeyLength = (int) ((keyRefWord & SmallKeyLength.bitMask) >> SmallKeyLength.bitShift); - boolean isKeyLengthSmall = (storedKeyLengthLength != SmallKeyLength.allBitsOn); - - // LOG.debug("VectorMapJoinFastKeyStore equalKey keyLength " + keyLength + " isKeyLengthSmall " + isKeyLengthSmall + " storedKeyLengthLength " + storedKeyLengthLength + " keyRefWord " + Long.toHexString(keyRefWord)); + boolean isKeyLengthSmall = (storedKeyLength != SmallKeyLength.allBitsOn); - if (isKeyLengthSmall && storedKeyLengthLength != keyLength) { + if (isKeyLengthSmall && storedKeyLength != keyLength) { return false; } long absoluteKeyOffset = @@ -139,9 +134,8 @@ public boolean equalKey(long keyRefWord, byte[] keyBytes, int keyStart, int keyL writeBuffers.setReadPoint(absoluteKeyOffset, readPos); if (!isKeyLengthSmall) { // Read big value length we wrote with the value. 
- storedKeyLengthLength = writeBuffers.readVInt(readPos); - if (storedKeyLengthLength != keyLength) { - // LOG.debug("VectorMapJoinFastKeyStore equalKey no match big length"); + storedKeyLength = writeBuffers.readVInt(readPos); + if (storedKeyLength != keyLength) { return false; } } @@ -152,16 +146,15 @@ public boolean equalKey(long keyRefWord, byte[] keyBytes, int keyStart, int keyL return false; } - // LOG.debug("VectorMapJoinFastKeyStore equalKey match on bytes"); return true; } - public VectorMapJoinFastKeyStore(int writeBuffersSize) { + public VectorKeyStore(int writeBuffersSize) { writeBuffers = new WriteBuffers(writeBuffersSize, AbsoluteKeyOffset.maxSize); unsafeReadPos = new WriteBuffers.Position(); } - public VectorMapJoinFastKeyStore(WriteBuffers writeBuffers) { + public VectorKeyStore(WriteBuffers writeBuffers) { // TODO: Check if maximum size compatible with AbsoluteKeyOffset.maxSize. this.writeBuffers = writeBuffers; unsafeReadPos = new WriteBuffers.Position(); } @@ -174,4 +167,23 @@ public long getEstimatedMemorySize() { size += unsafeReadPos == null ? 0 : unsafeReadPos.getEstimatedMemorySize(); return size; } + + public void getKey(long keyRefWord, ByteSegmentRef keyByteSegmentRef, + WriteBuffers.Position readPos) { + + int storedKeyLength = + (int) ((keyRefWord & SmallKeyLength.bitMask) >> SmallKeyLength.bitShift); + boolean isKeyLengthSmall = (storedKeyLength != SmallKeyLength.allBitsOn); + + long absoluteKeyOffset = + (keyRefWord & AbsoluteKeyOffset.bitMask); + + writeBuffers.setReadPoint(absoluteKeyOffset, readPos); + if (!isKeyLengthSmall) { + // Read the big key length we wrote with the key. + storedKeyLength = writeBuffers.readVInt(readPos); + } + writeBuffers.getByteSegmentRefToCurrent(keyByteSegmentRef, storedKeyLength, readPos); + } + } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastBytesHashMap.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastBytesHashMap.java index 57db136..d251aa5 100--- --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastBytesHashMap.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastBytesHashMap.java @@ -23,6 +23,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.JoinUtil; +import org.apache.hadoop.hive.ql.exec.vector.keystore.VectorKeyStore; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinBytesHashMap; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinHashMapResult; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -105,7 +106,7 @@ public VectorMapJoinFastBytesHashMap( valueStore = new VectorMapJoinFastValueStore(writeBuffersSize); // Share the same write buffers with our value store.
- keyStore = new VectorMapJoinFastKeyStore(valueStore.writeBuffers()); + keyStore = new VectorKeyStore(valueStore.writeBuffers()); } @Override diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastBytesHashMultiSet.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastBytesHashMultiSet.java index 726fd29..b284a83 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastBytesHashMultiSet.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastBytesHashMultiSet.java @@ -23,6 +23,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.JoinUtil; +import org.apache.hadoop.hive.ql.exec.vector.keystore.VectorKeyStore; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinBytesHashMultiSet; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinHashMultiSetResult; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -57,10 +58,8 @@ public void assignSlot(int slot, byte[] keyBytes, int keyStart, int keyLength, slotTriples[tripleIndex] = keyStore.add(keyBytes, keyStart, keyLength); slotTriples[tripleIndex + 1] = hashCode; slotTriples[tripleIndex + 2] = 1; // Count. - // LOG.debug("VectorMapJoinFastBytesHashMap add first keyRefWord " + Long.toHexString(slotTriples[tripleIndex]) + " hashCode " + Long.toHexString(slotTriples[tripleIndex + 1]) + " valueRefWord " + Long.toHexString(slotTriples[tripleIndex + 2])); } else { // Add another value. - // LOG.debug("VectorMapJoinFastBytesHashMap add more keyRefWord " + Long.toHexString(slotTriples[tripleIndex]) + " hashCode " + Long.toHexString(slotTriples[tripleIndex + 1]) + " valueRefWord " + Long.toHexString(slotTriples[tripleIndex + 2])); slotTriples[tripleIndex + 2]++; } } @@ -95,7 +94,7 @@ public VectorMapJoinFastBytesHashMultiSet( int initialCapacity, float loadFactor, int writeBuffersSize, long estimatedKeyCount) { super(initialCapacity, loadFactor, writeBuffersSize, estimatedKeyCount); - keyStore = new VectorMapJoinFastKeyStore(writeBuffersSize); + keyStore = new VectorKeyStore(writeBuffersSize); } @Override diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastBytesHashSet.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastBytesHashSet.java index 5d750a8..52801e2 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastBytesHashSet.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastBytesHashSet.java @@ -21,6 +21,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.JoinUtil; +import org.apache.hadoop.hive.ql.exec.vector.keystore.VectorKeyStore; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinBytesHashSet; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinHashSetResult; import org.apache.hadoop.io.BytesWritable; @@ -82,7 +83,7 @@ public VectorMapJoinFastBytesHashSet( int initialCapacity, float loadFactor, int writeBuffersSize, long estimatedKeyCount) { super(initialCapacity, loadFactor, writeBuffersSize, estimatedKeyCount); - keyStore = new VectorMapJoinFastKeyStore(writeBuffersSize); + keyStore = new VectorKeyStore(writeBuffersSize); } @Override diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastBytesHashTable.java 
ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastBytesHashTable.java index f2b794f..15dd125 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastBytesHashTable.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastBytesHashTable.java @@ -23,6 +23,7 @@ import org.apache.hadoop.hive.ql.util.JavaDataModel; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import org.apache.hadoop.hive.ql.exec.vector.keystore.VectorKeyStore; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinBytesHashTable; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.serde2.WriteBuffers; @@ -40,7 +41,7 @@ private static final Logger LOG = LoggerFactory.getLogger(VectorMapJoinFastBytesHashTable.class); - protected VectorMapJoinFastKeyStore keyStore; + protected VectorKeyStore keyStore; protected BytesWritable testKeyBytesWritable; @@ -68,15 +69,13 @@ public void add(byte[] keyBytes, int keyStart, int keyLength, BytesWritable curr int i = 0; boolean isNewKey; while (true) { - int tripleIndex = 3 * slot; + final int tripleIndex = 3 * slot; if (slotTriples[tripleIndex] == 0) { - // LOG.debug("VectorMapJoinFastBytesHashMap findWriteSlot slot " + slot + " tripleIndex " + tripleIndex + " empty"); isNewKey = true;; break; } if (hashCode == slotTriples[tripleIndex + 1] && keyStore.unsafeEqualKey(slotTriples[tripleIndex], keyBytes, keyStart, keyLength)) { - // LOG.debug("VectorMapJoinFastBytesHashMap findWriteSlot slot " + slot + " tripleIndex " + tripleIndex + " existing"); isNewKey = false; break; } diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java index d3fbf07..169087e 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java @@ -93,6 +93,18 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.VectorAggregateExpression; import org.apache.hadoop.hive.ql.io.NullRowsInputFormat; import org.apache.hadoop.hive.ql.io.OneNullRowInputFormat; +import org.apache.hadoop.hive.ql.exec.vector.groupby.operator.gen.VectorGroupByHashLongKeyDuplicateReductionOperator; +import org.apache.hadoop.hive.ql.exec.vector.groupby.operator.gen.VectorGroupByHashLongKeySingleCountColumnOperator; +import org.apache.hadoop.hive.ql.exec.vector.groupby.operator.gen.VectorGroupByHashSerializeKeyDuplicateReductionOperator; +import org.apache.hadoop.hive.ql.exec.vector.groupby.operator.gen.VectorGroupByHashSerializeKeySingleCountColumnOperator; +import org.apache.hadoop.hive.ql.exec.vector.groupby.operator.gen.VectorGroupByHashStringKeyDuplicateReductionOperator; +import org.apache.hadoop.hive.ql.exec.vector.groupby.operator.gen.VectorGroupByHashStringKeySingleCountColumnOperator; +import org.apache.hadoop.hive.ql.exec.vector.groupby.operator.gen.VectorGroupByHashLongKeySingleCountKeyOperator; +import org.apache.hadoop.hive.ql.exec.vector.groupby.operator.gen.VectorGroupByHashSerializeKeySingleCountKeyOperator; +import org.apache.hadoop.hive.ql.exec.vector.groupby.operator.gen.VectorGroupByHashStringKeySingleCountKeyOperator; +import org.apache.hadoop.hive.ql.exec.vector.groupby.operator.gen.VectorGroupByHashLongKeySingleCountStarOperator; +import org.apache.hadoop.hive.ql.exec.vector.groupby.operator.gen.VectorGroupByHashSerializeKeySingleCountStarOperator; +import 
org.apache.hadoop.hive.ql.exec.vector.groupby.operator.gen.VectorGroupByHashStringKeySingleCountStarOperator; import org.apache.hadoop.hive.ql.io.orc.OrcInputFormat; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatchCtx; import org.apache.hadoop.hive.ql.lib.Dispatcher; @@ -128,9 +140,13 @@ import org.apache.hadoop.hive.ql.plan.VectorDesc; import org.apache.hadoop.hive.ql.plan.VectorFileSinkDesc; import org.apache.hadoop.hive.ql.plan.VectorFilterDesc; +import org.apache.hadoop.hive.ql.plan.VectorGroupByInfo.AggregationVariation; import org.apache.hadoop.hive.ql.plan.VectorPTFDesc; import org.apache.hadoop.hive.ql.plan.VectorPTFInfo; import org.apache.hadoop.hive.ql.plan.VectorPTFDesc.SupportedFunctionType; +import org.apache.hadoop.hive.ql.plan.VectorGroupByInfo; +import org.apache.hadoop.hive.ql.plan.VectorGroupByInfo.SingleCountAggregation; +import org.apache.hadoop.hive.ql.plan.VectorGroupByInfo.SingleCountAggregation.SingleCountAggregationKind; import org.apache.hadoop.hive.ql.plan.VectorTableScanDesc; import org.apache.hadoop.hive.ql.plan.VectorizationCondition; import org.apache.hadoop.hive.ql.plan.VectorGroupByDesc.ProcessingMode; @@ -224,6 +240,7 @@ import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; +import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils; import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; import org.apache.hadoop.mapred.InputFormat; @@ -303,6 +320,12 @@ private VectorizationEnabledOverride vectorizationEnabledOverride; boolean isTestForcedVectorizationEnable; + boolean isVectorizationGroupByNativeEnabled; + private VectorizationEnabledOverride vectorizationGroupByNativeEnabledOverride; + boolean isTestForcedVectorizationGroupByNativeEnable; + boolean weCanAttemptGroupByNativeVectorization; + int testGroupByMaxMemoryAvailable; + private boolean useVectorizedInputFileFormat; private boolean useVectorDeserialize; private boolean useRowDeserialize; @@ -2220,6 +2243,44 @@ public PhysicalContext resolve(PhysicalContext physicalContext) throws SemanticE return physicalContext; } + // Native Vector GROUP BY. + isVectorizationGroupByNativeEnabled = + HiveConf.getBoolVar(hiveConf, + HiveConf.ConfVars.HIVE_VECTORIZATION_GROUPBY_NATIVE_ENABLED); + + final String testVectorizationGroupByNativeOverrideString = + HiveConf.getVar(hiveConf, + HiveConf.ConfVars.HIVE_TEST_VECTORIZATION_GROUPBY_NATIVE_OVERRIDE); + vectorizationGroupByNativeEnabledOverride = + VectorizationEnabledOverride.nameMap.get(testVectorizationGroupByNativeOverrideString); + + isTestForcedVectorizationGroupByNativeEnable = false; + switch (vectorizationGroupByNativeEnabledOverride) { + case NONE: + weCanAttemptGroupByNativeVectorization = isVectorizationGroupByNativeEnabled; + break; + case DISABLE: + weCanAttemptGroupByNativeVectorization = false; + break; + case ENABLE: + weCanAttemptGroupByNativeVectorization = true; + isTestForcedVectorizationGroupByNativeEnable = !isVectorizationGroupByNativeEnabled; + + // Different parts of the code rely on this being set... 
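+ // For example, a test run with hive.vectorized.execution.groupby.native.enabled=false but + // the override set to "enable" flips the session-level setting to true here, so later + // reads of the ConfVar agree with weCanAttemptGroupByNativeVectorization.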
+ HiveConf.setBoolVar(hiveConf, + HiveConf.ConfVars.HIVE_VECTORIZATION_GROUPBY_NATIVE_ENABLED, true); + isVectorizationGroupByNativeEnabled = true; + break; + default: + throw new RuntimeException("Unexpected vectorization enabled override " + + vectorizationGroupByNativeEnabledOverride); + } + + testGroupByMaxMemoryAvailable = + HiveConf.getIntVar(hiveConf, + HiveConf.ConfVars.HIVE_TEST_VECTORIZATION_GROUPBY_NATIVE_MAX_MEMORY_AVAILABLE); + + // Input Format control. useVectorizedInputFileFormat = HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVE_VECTORIZATION_USE_VECTORIZED_INPUT_FILE_FORMAT); @@ -3662,6 +3723,261 @@ private boolean canSpecializeMapJoin(Operator op, MapJoi return result; } + private Operator specializeGroupByOperator( + Operator op, VectorizationContext vContext, + GroupByDesc desc, VectorGroupByDesc vectorDesc) + throws HiveException { + + VectorGroupByInfo vectorGroupByInfo = vectorDesc.getVectorGroupByInfo(); + + Operator vectorOp = null; + Class> opClass = null; + + VectorGroupByInfo.HashTableKeyType hashTableKeyType = + vectorGroupByInfo.getHashTableKeyType(); + + AggregationVariation aggregationVariation = vectorGroupByInfo.getAggregationVariation(); + switch (aggregationVariation) { + case HASH_DUPLICATE_REDUCTION: + switch (hashTableKeyType) { + case LONG: + opClass = VectorGroupByHashLongKeyDuplicateReductionOperator.class; + break; + case STRING: + opClass = VectorGroupByHashStringKeyDuplicateReductionOperator.class; + break; + case SERIALIZE: + opClass = VectorGroupByHashSerializeKeyDuplicateReductionOperator.class; + break; + default: + throw new RuntimeException( + "Unexpected hash table type " + hashTableKeyType); + } + break; + + case HASH_SINGLE_COUNT: + { + SingleCountAggregationKind singleCountAggregationKind = + vectorGroupByInfo.getSingleCountAggregation().getSingleCountAggregationKind(); + + switch (singleCountAggregationKind) { + case COUNT_STAR: + switch (hashTableKeyType) { + case LONG: + opClass = VectorGroupByHashLongKeySingleCountStarOperator.class; + break; + case STRING: + opClass = VectorGroupByHashStringKeySingleCountStarOperator.class; + break; + case SERIALIZE: + opClass = VectorGroupByHashSerializeKeySingleCountStarOperator.class; + break; + default: + throw new RuntimeException( + "Unexpected hash table type " + hashTableKeyType); + } + break; + case COUNT_KEY: + switch (hashTableKeyType) { + case LONG: + opClass = VectorGroupByHashLongKeySingleCountKeyOperator.class; + break; + case STRING: + opClass = VectorGroupByHashStringKeySingleCountKeyOperator.class; + break; + case SERIALIZE: + opClass = VectorGroupByHashSerializeKeySingleCountKeyOperator.class; + break; + default: + throw new RuntimeException( + "Unexpected hash table type " + hashTableKeyType); + } + break; + case COUNT_COLUMN: + switch (hashTableKeyType) { + case LONG: + opClass = VectorGroupByHashLongKeySingleCountColumnOperator.class; + break; + case STRING: + opClass = VectorGroupByHashStringKeySingleCountColumnOperator.class; + break; + case SERIALIZE: + opClass = VectorGroupByHashSerializeKeySingleCountColumnOperator.class; + break; + default: + throw new RuntimeException( + "Unexpected hash table type " + hashTableKeyType); + } + break; + default: + throw new RuntimeException( + "Unexpected single count aggregation kind " + singleCountAggregationKind); + } + } + break; + + default: + throw new RuntimeException("Unexpected aggregation variation " + aggregationVariation); + } + + vectorDesc.setVectorGroupByInfo(vectorGroupByInfo); + + vectorDesc.setIsNative(true); + + 
vectorOp = OperatorFactory.getVectorOperator( + opClass, op.getCompilationOpContext(), desc, vContext, vectorDesc); + LOG.info("Vectorizer vectorizeOperator group by class " + vectorOp.getClass().getSimpleName()); + + return vectorOp; + } + + private boolean canSpecializeGroupBy(GroupByDesc desc, VectorGroupByDesc vectorDesc, + boolean isTezOrSpark, VectorizationContext vContext) throws HiveException { + + String engine = HiveConf.getVar(hiveConf, HiveConf.ConfVars.HIVE_EXECUTION_ENGINE); + + VectorGroupByInfo vectorGroupByInfo = new VectorGroupByInfo(); + + List vectorizationIssueList = new ArrayList(); + + List keyDescs = desc.getKeys(); + final boolean isEmptyKey = keyDescs.isEmpty(); + final int outputKeyLength = keyDescs.size(); + + GroupByDesc.Mode groupByMode = desc.getMode(); + ProcessingMode processingMode = vectorDesc.getProcessingMode(); + + VectorExpression[] vecKeyExprs = vectorDesc.getKeyExpressions(); + final int vecKeyExprSize = vecKeyExprs.length; + + VectorAggregationDesc[] vecAggrDescs = vectorDesc.getVecAggrDescs(); + final int vecAggrDescSize = (vecAggrDescs == null ? 0 : vecAggrDescs.length); + + List aggrDescList = desc.getAggregators(); + + boolean isHash = (groupByMode == GroupByDesc.Mode.HASH); + final AggregationVariation aggregationVariation; + + SingleCountAggregation singleCountAggregation = null; + + if (isHash && vecAggrDescSize == 0) { + + // No aggregations just means the key is being grouped. We are getting rid of duplicate keys. + + aggregationVariation = AggregationVariation.HASH_DUPLICATE_REDUCTION; + singleCountAggregation = null; + + } else if (isHash && vecKeyExprSize == 1 && vecAggrDescSize == 1 && + aggrDescList.get(0).getGenericUDAFName().equalsIgnoreCase("count")) { + + // Single COUNT aggregation specialization. Store key and count in hash table without a + // hash element. + + AggregationDesc countAggrDesc = aggrDescList.get(0); + List countParamList = countAggrDesc.getParameters(); + final int countParamSize = countParamList.size(); + if (countParamSize == 0) { + + // COUNT(*) + + aggregationVariation = AggregationVariation.HASH_SINGLE_COUNT; + singleCountAggregation = + new SingleCountAggregation(SingleCountAggregationKind.COUNT_STAR); + + } else if (countParamSize == 1) { + + aggregationVariation = AggregationVariation.HASH_SINGLE_COUNT; + + VectorAggregationDesc countVecAggrDesc = vecAggrDescs[0]; + + final int inputColumnNum = countVecAggrDesc.getInputExpression().getOutputColumnNum(); + + boolean isKey = false; + for (VectorExpression vecKeyExpr : vecKeyExprs) { + if (vecKeyExpr.getOutputColumnNum() == inputColumnNum) { + isKey = true; + break; + } + } + if (isKey) { + singleCountAggregation = + new SingleCountAggregation(SingleCountAggregationKind.COUNT_KEY); + } else { + singleCountAggregation = + new SingleCountAggregation(SingleCountAggregationKind.COUNT_COLUMN, inputColumnNum); + } + } else { + + aggregationVariation = AggregationVariation.NONE; + + vectorizationIssueList.add( + "Cannot specialize aggregation function " + countAggrDesc.getGenericUDAFName() + + " that has more than 1 input parameter"); + } + + } else { + + // FUTURE: More aggregations. 
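+ // (For example, a query with multiple aggregations, or a single SUM/MIN/MAX, lands here + // with AggregationVariation.NONE, so canSpecializeGroupBy returns false and the plan + // falls back to the existing non-native VectorGroupByOperator.)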
+ aggregationVariation = AggregationVariation.NONE; + } + + // TEMPORARY: Restriction + boolean isSingleColumnKey = (vecKeyExprSize == 1); + + VectorGroupByInfo.HashTableKeyType hashTableKeyType = VectorGroupByInfo.HashTableKeyType.NONE; + if (isSingleColumnKey) { + ColumnVector.Type colVectorType = vecKeyExprs[0].getOutputColumnVectorType(); + switch (colVectorType) { + case LONG: + + // Integer family, date, interval year month. + hashTableKeyType = VectorGroupByInfo.HashTableKeyType.LONG; + break; + case BYTES: + + // String family. + hashTableKeyType = VectorGroupByInfo.HashTableKeyType.STRING; + break; + default: + + // All other data types get serialized. + hashTableKeyType = VectorGroupByInfo.HashTableKeyType.SERIALIZE; + break; + } + } + + vectorGroupByInfo.setIsVectorizationGroupByNativeEnabled( + weCanAttemptGroupByNativeVectorization); + vectorGroupByInfo.setEngine(engine); + + // Temporary restrictions... + vectorGroupByInfo.setIsSingleKeyColumn(isSingleColumnKey); + + vectorGroupByInfo.setVectorizationIssueList(vectorizationIssueList); + + vectorGroupByInfo.setAggregationVariation(aggregationVariation); + vectorGroupByInfo.setSingleCountAggregation(singleCountAggregation); + + vectorGroupByInfo.setHashTableKeyType(hashTableKeyType); + + vectorGroupByInfo.setTestGroupByMaxMemoryAvailable(testGroupByMaxMemoryAvailable); + + // So EXPLAIN VECTORIZATION can show native conditions, etc. + vectorDesc.setVectorGroupByInfo(vectorGroupByInfo); + + if (!weCanAttemptGroupByNativeVectorization || + !isTezOrSpark || + !isSingleColumnKey || + (aggregationVariation == AggregationVariation.NONE) || + groupByMode != GroupByDesc.Mode.HASH || + desc.isGroupingSetsPresent() || + vectorizationIssueList.size() > 0) { + return false; + } + + return true; + } + private Operator specializeReduceSinkOperator( Operator op, VectorizationContext vContext, ReduceSinkDesc desc, VectorReduceSinkDesc vectorDesc) throws HiveException { @@ -4232,16 +4548,30 @@ private boolean usesVectorUDFAdaptor(VectorExpression[] vecExprs) { Operator groupByOp, VectorizationContext vContext, VectorGroupByDesc vectorGroupByDesc) throws HiveException { - ImmutablePair,String> pair = + String issue = + doVectorizeGroupByOperatorPreparation( + groupByOp, vContext, vectorGroupByDesc); + Preconditions.checkState(issue == null); + return doVectorizeGroupByOperator( groupByOp, vContext, vectorGroupByDesc); - return pair.left; + } + + private static Operator doVectorizeGroupByOperator( + Operator groupByOp, VectorizationContext vContext, + VectorGroupByDesc vectorGroupByDesc) + throws HiveException { + Operator vectorOp = + OperatorFactory.getVectorOperator( + groupByOp.getCompilationOpContext(), (GroupByDesc) groupByOp.getConf(), + vContext, vectorGroupByDesc); + return vectorOp; } /* * NOTE: The VectorGroupByDesc has already been allocated and will be updated here. 
*/ - private static ImmutablePair,String> doVectorizeGroupByOperator( + private static String doVectorizeGroupByOperatorPreparation( Operator groupByOp, VectorizationContext vContext, VectorGroupByDesc vectorGroupByDesc) throws HiveException { @@ -4263,7 +4593,7 @@ private boolean usesVectorUDFAdaptor(VectorExpression[] vecExprs) { ImmutablePair pair = getVectorAggregationDesc(aggDesc, vContext); if (pair.left == null) { - return new ImmutablePair, String>(null, pair.right); + return pair.right; } vecAggrDescs[i] = pair.left; @@ -4274,14 +4604,9 @@ private boolean usesVectorUDFAdaptor(VectorExpression[] vecExprs) { vectorGroupByDesc.setKeyExpressions(vecKeyExpressions); vectorGroupByDesc.setVecAggrDescs(vecAggrDescs); vectorGroupByDesc.setProjectedOutputColumns(projectedOutputColumns); - Operator vectorOp = - OperatorFactory.getVectorOperator( - groupByOp.getCompilationOpContext(), groupByDesc, - vContext, vectorGroupByDesc); - return new ImmutablePair, String>(vectorOp, null); - } - static int fake; + return null; // No issue. + } public static Operator vectorizeSelectOperator( Operator selectOp, VectorizationContext vContext, @@ -4830,23 +5155,40 @@ private static VectorPTFInfo createVectorPTFInfo(Operator,String> pair = - doVectorizeGroupByOperator(op, vContext, vectorGroupByDesc); - if (pair.left == null) { - setOperatorIssue(pair.right); + String issue = + doVectorizeGroupByOperatorPreparation(op, vContext, vectorGroupByDesc); + if (issue != null) { + setOperatorIssue(issue); throw new VectorizerCannotVectorizeException(); } - vectorOp = pair.left; - isNative = false; + + GroupByDesc groupByDesc = (GroupByDesc) op.getConf(); + boolean specialize = + canSpecializeGroupBy(groupByDesc, vectorGroupByDesc, isTezOrSpark, vContext); + + if (!specialize) { + + vectorOp = + doVectorizeGroupByOperator(op, vContext, vectorGroupByDesc); + isNative = false; + + } else { + + vectorOp = + specializeGroupByOperator(op, vContext, groupByDesc, vectorGroupByDesc); + isNative = true; + } if (vectorTaskColumnInfo != null) { VectorExpression[] vecKeyExpressions = vectorGroupByDesc.getKeyExpressions(); if (usesVectorUDFAdaptor(vecKeyExpressions)) { vectorTaskColumnInfo.setUsesVectorUDFAdaptor(true); } VectorAggregationDesc[] vecAggrDescs = vectorGroupByDesc.getVecAggrDescs(); - for (VectorAggregationDesc vecAggrDesc : vecAggrDescs) { - if (usesVectorUDFAdaptor(vecAggrDesc.getInputExpression())) { - vectorTaskColumnInfo.setUsesVectorUDFAdaptor(true); + if (vecAggrDescs != null) { + for (VectorAggregationDesc vecAggrDesc : vecAggrDescs) { + if (usesVectorUDFAdaptor(vecAggrDesc.getInputExpression())) { + vectorTaskColumnInfo.setUsesVectorUDFAdaptor(true); + } } } } diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/GroupByDesc.java ql/src/java/org/apache/hadoop/hive/ql/plan/GroupByDesc.java index 31237c8..db31210 100644 --- ql/src/java/org/apache/hadoop/hive/ql/plan/GroupByDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/plan/GroupByDesc.java @@ -20,8 +20,10 @@ import java.util.ArrayList; import java.util.Arrays; +import java.util.LinkedHashSet; import java.util.List; import java.util.Objects; +import java.util.Set; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.vector.VectorAggregationDesc; @@ -31,7 +33,10 @@ import org.apache.hive.common.util.AnnotationUtils; import org.apache.hadoop.hive.ql.plan.Explain.Level; import org.apache.hadoop.hive.ql.plan.Explain.Vectorization; - +import org.apache.hadoop.hive.ql.plan.VectorGroupByDesc.ProcessingMode; +import 
org.apache.hadoop.hive.ql.plan.VectorGroupByInfo.AggregationVariation; +import org.apache.hadoop.hive.ql.plan.VectorGroupByInfo.SingleCountAggregation; +import org.apache.hadoop.hive.ql.plan.VectorGroupByInfo.SingleCountAggregation.SingleCountAggregationKind; /** * GroupByDesc. @@ -324,26 +329,38 @@ public Object clone() { this.groupingSetPosition, this.isDistinct); } + // Use LinkedHashSet to give predictable display order. + private static final Set vectorizableGroupByNativeEngines = + new LinkedHashSet(Arrays.asList("tez", "spark")); + public class GroupByOperatorExplainVectorization extends OperatorExplainVectorization { private final GroupByDesc groupByDesc; private final VectorGroupByDesc vectorGroupByDesc; + private final VectorGroupByInfo vectorGroupByInfo; + + private VectorizationCondition[] nativeConditions; public GroupByOperatorExplainVectorization(GroupByDesc groupByDesc, VectorGroupByDesc vectorGroupByDesc) { - // Native vectorization not supported. - super(vectorGroupByDesc, false); + super(vectorGroupByDesc, vectorGroupByDesc.isNative()); this.groupByDesc = groupByDesc; this.vectorGroupByDesc = vectorGroupByDesc; + vectorGroupByInfo = vectorGroupByDesc.getVectorGroupByInfo(); } - @Explain(vectorization = Vectorization.EXPRESSION, displayName = "keyExpressions", explainLevels = { Level.DEFAULT, Level.EXTENDED }) + @Explain(vectorization = Vectorization.EXPRESSION, displayName = "keyExpressions", + explainLevels = { Level.DEFAULT, Level.EXTENDED }) public List getKeysExpression() { return vectorExpressionsToStringList(vectorGroupByDesc.getKeyExpressions()); } - @Explain(vectorization = Vectorization.EXPRESSION, displayName = "aggregators", explainLevels = { Level.DEFAULT, Level.EXTENDED }) + @Explain(vectorization = Vectorization.EXPRESSION, displayName = "aggregators", + explainLevels = { Level.DEFAULT, Level.EXTENDED }) public List getAggregators() { + if (isNative) { + return null; + } VectorAggregationDesc[] vecAggrDescs = vectorGroupByDesc.getVecAggrDescs(); List vecAggrList = new ArrayList(vecAggrDescs.length); for (VectorAggregationDesc vecAggrDesc : vecAggrDescs) { @@ -352,17 +369,20 @@ public GroupByOperatorExplainVectorization(GroupByDesc groupByDesc, return vecAggrList; } - @Explain(vectorization = Vectorization.OPERATOR, displayName = "vectorProcessingMode", explainLevels = { Level.DEFAULT, Level.EXTENDED }) + @Explain(vectorization = Vectorization.OPERATOR, displayName = "vectorProcessingMode", + explainLevels = { Level.DEFAULT, Level.EXTENDED }) public String getProcessingMode() { return vectorGroupByDesc.getProcessingMode().name(); } - @Explain(vectorization = Vectorization.OPERATOR, displayName = "groupByMode", explainLevels = { Level.DEFAULT, Level.EXTENDED }) + @Explain(vectorization = Vectorization.OPERATOR, displayName = "groupByMode", + explainLevels = { Level.DEFAULT, Level.EXTENDED }) public String getGroupByMode() { return groupByDesc.getMode().name(); } - @Explain(vectorization = Vectorization.OPERATOR, displayName = "vectorOutputConditionsNotMet", explainLevels = { Level.DEFAULT, Level.EXTENDED }) + @Explain(vectorization = Vectorization.OPERATOR, displayName = "vectorOutputConditionsNotMet", + explainLevels = { Level.DEFAULT, Level.EXTENDED }) public List getVectorOutputConditionsNotMet() { List results = new ArrayList(); @@ -379,13 +399,113 @@ public String getGroupByMode() { return results; } - @Explain(vectorization = Vectorization.EXPRESSION, displayName = "projectedOutputColumnNums", explainLevels = { Level.DEFAULT, Level.EXTENDED }) + 
@Explain(vectorization = Vectorization.EXPRESSION, displayName = "projectedOutputColumnNums", + explainLevels = { Level.DEFAULT, Level.EXTENDED }) public String getProjectedOutputColumnNums() { return Arrays.toString(vectorGroupByDesc.getProjectedOutputColumns()); } + + private VectorizationCondition[] createNativeConditions() { + + boolean enabled = vectorGroupByInfo.getIsVectorizationGroupByNativeEnabled(); + + String engine = vectorGroupByInfo.getEngine(); + String engineInSupportedCondName = + HiveConf.ConfVars.HIVE_EXECUTION_ENGINE.varname + " " + engine + " IN " + vectorizableGroupByNativeEngines; + boolean engineInSupported = vectorizableGroupByNativeEngines.contains(engine); + + final List vectorizationIssueList = vectorGroupByInfo.getVectorizationIssueList(); + + List conditionList = new ArrayList(); + conditionList.add( + new VectorizationCondition( + enabled, + HiveConf.ConfVars.HIVE_VECTORIZATION_GROUPBY_NATIVE_ENABLED.varname)); + conditionList.add( + new VectorizationCondition( + engineInSupported, + engineInSupportedCondName)); + conditionList.add( + new VectorizationCondition( + vectorGroupByInfo.getIsSingleKeyColumn(), + "Single Key Column")); + AggregationVariation aggregationVariation = vectorGroupByInfo.getAggregationVariation(); + conditionList.add( + new VectorizationCondition( + (aggregationVariation == AggregationVariation.HASH_SINGLE_COUNT || + aggregationVariation == AggregationVariation.HASH_DUPLICATE_REDUCTION), + "Single COUNT aggregation or Duplicate Reduction")); + conditionList.add( + new VectorizationCondition( + (vectorGroupByDesc.getProcessingMode() == ProcessingMode.HASH), + "Group By Mode HASH")); + conditionList.add( + new VectorizationCondition( + !groupByDesc.isGroupingSetsPresent(), + "No Grouping Sets")); + if (vectorizationIssueList.size() != 0) { + conditionList.add( + new VectorizationCondition( + true, + "Has issues \"" + + vectorizationIssueList.toString() + "\"")); + } + + VectorizationCondition[] conditions = + conditionList.toArray(new VectorizationCondition[0]); + + return conditions; + } + + @Explain(vectorization = Vectorization.OPERATOR, displayName = "nativeConditionsMet", + explainLevels = { Level.DEFAULT, Level.EXTENDED }) + public List getNativeConditionsMet() { + + // For now, just report native conditions met / not met for HASH mode. + // It dramatically limits the number of Q file differences. + if (vectorGroupByDesc.getProcessingMode() != ProcessingMode.HASH) { + return null; + } + + if (nativeConditions == null) { + nativeConditions = createNativeConditions(); + } + return VectorizationCondition.getConditionsMet(nativeConditions); + } + + @Explain(vectorization = Vectorization.OPERATOR, displayName = "nativeConditionsNotMet", + explainLevels = { Level.DEFAULT, Level.EXTENDED }) + public List getNativeConditionsNotMet() { + + // For now, just report native conditions met / not met for HASH mode. + // It dramatically limits the number of Q file differences. 
+      if (vectorGroupByDesc.getProcessingMode() != ProcessingMode.HASH) {
+        return null;
+      }
+
+      if (nativeConditions == null) {
+        nativeConditions = createNativeConditions();
+      }
+      return VectorizationCondition.getConditionsNotMet(nativeConditions);
+    }
+
+    @Explain(vectorization = Vectorization.DETAIL, displayName = "singleCountAggregation",
+        explainLevels = { Level.DEFAULT, Level.EXTENDED })
+    public String getSingleCountAggregation() {
+      if (!isNative) {
+        return null;
+      }
+      final SingleCountAggregationKind singleCountAggregationKind =
+          vectorGroupByInfo.getSingleCountAggregation().getSingleCountAggregationKind();
+      if (singleCountAggregationKind == SingleCountAggregationKind.NONE) {
+        return null;
+      }
+      return singleCountAggregationKind.name();
+    }
   }
 
-  @Explain(vectorization = Vectorization.OPERATOR, displayName = "Group By Vectorization", explainLevels = { Level.DEFAULT, Level.EXTENDED })
+  @Explain(vectorization = Vectorization.OPERATOR, displayName = "Group By Vectorization",
+      explainLevels = { Level.DEFAULT, Level.EXTENDED })
   public GroupByOperatorExplainVectorization getGroupByVectorization() {
     VectorGroupByDesc vectorGroupByDesc = (VectorGroupByDesc) getVectorDesc();
     if (vectorGroupByDesc == null) {
@@ -404,11 +524,14 @@ public static String getComplexTypeEnabledCondition(
   public static String getComplexTypeWithGroupByEnabledCondition(
       boolean isVectorizationComplexTypesEnabled, boolean isVectorizationGroupByComplexTypesEnabled) {
-    final boolean enabled = (isVectorizationComplexTypesEnabled && isVectorizationGroupByComplexTypesEnabled);
+    final boolean enabled =
+        (isVectorizationComplexTypesEnabled && isVectorizationGroupByComplexTypesEnabled);
     return "(" +
-        HiveConf.ConfVars.HIVE_VECTORIZATION_COMPLEX_TYPES_ENABLED.varname + " " + isVectorizationComplexTypesEnabled +
+        HiveConf.ConfVars.HIVE_VECTORIZATION_COMPLEX_TYPES_ENABLED.varname + " " +
+        isVectorizationComplexTypesEnabled +
         " AND " +
-        HiveConf.ConfVars.HIVE_VECTORIZATION_GROUPBY_COMPLEX_TYPES_ENABLED.varname + " " + isVectorizationGroupByComplexTypesEnabled +
+        HiveConf.ConfVars.HIVE_VECTORIZATION_GROUPBY_COMPLEX_TYPES_ENABLED.varname + " " +
+        isVectorizationGroupByComplexTypesEnabled +
         ") IS " + enabled;
   }
diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/VectorGroupByDesc.java ql/src/java/org/apache/hadoop/hive/ql/plan/VectorGroupByDesc.java
index caf0c67..b7e60f7 100644
--- ql/src/java/org/apache/hadoop/hive/ql/plan/VectorGroupByDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/VectorGroupByDesc.java
@@ -67,8 +67,12 @@
   private boolean isVectorizationComplexTypesEnabled;
   private boolean isVectorizationGroupByComplexTypesEnabled;
 
+  private boolean isNative;
+  private VectorGroupByInfo vectorGroupByInfo;
+
   public VectorGroupByDesc() {
-    this.processingMode = ProcessingMode.NONE;
+    processingMode = ProcessingMode.NONE;
+    isNative = false;
   }
 
   public void setProcessingMode(ProcessingMode processingMode) {
@@ -78,6 +82,14 @@ public ProcessingMode getProcessingMode() {
     return processingMode;
   }
 
+  public void setIsNative(boolean isNative) {
+    this.isNative = isNative;
+  }
+
+  public boolean isNative() {
+    return isNative;
+  }
+
   public void setKeyExpressions(VectorExpression[] keyExpressions) {
     this.keyExpressions = keyExpressions;
   }
@@ -118,6 +130,14 @@ public boolean getIsVectorizationGroupByComplexTypesEnabled() {
     return isVectorizationGroupByComplexTypesEnabled;
   }
 
+  public void setVectorGroupByInfo(VectorGroupByInfo vectorGroupByInfo) {
+    this.vectorGroupByInfo = vectorGroupByInfo;
+  }
+
+  public VectorGroupByInfo getVectorGroupByInfo() {
+    return vectorGroupByInfo;
+  }
+
   /**
    * Which ProcessingMode for VectorGroupByOperator?
    *
diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/VectorGroupByInfo.java ql/src/java/org/apache/hadoop/hive/ql/plan/VectorGroupByInfo.java
new file mode 100644
index 0000000..994a916
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/VectorGroupByInfo.java
@@ -0,0 +1,195 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.plan;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.TreeMap;
+import java.util.TreeSet;
+
+import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.ColumnVector.Type;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+
+import com.google.common.base.Preconditions;
+
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedBatchUtil;
+
+/**
+ * VectorGroupByInfo.
+ *
+ * A convenience data structure that has the information needed to vectorize group by.
+ *
+ * It is created by the Vectorizer when it is determining whether it can specialize, so the
+ * information doesn't have to be recreated again and again by the VectorGroupByOperator's
+ * constructors and later during execution.
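+ *
+ * A minimal usage sketch (hypothetical caller code, not part of this patch) of how the
+ * Vectorizer side might fill one in:
+ *
+ * <pre>{@code
+ *   VectorGroupByInfo info = new VectorGroupByInfo();
+ *   info.setIsVectorizationGroupByNativeEnabled(true);
+ *   info.setEngine("tez");
+ *   info.setIsSingleKeyColumn(true);
+ *   info.setAggregationVariation(AggregationVariation.HASH_SINGLE_COUNT);
+ *   info.setSingleCountAggregation(
+ *       new SingleCountAggregation(SingleCountAggregationKind.COUNT_STAR));
+ *   info.setHashTableKeyType(HashTableKeyType.LONG);
+ *   vectorGroupByDesc.setVectorGroupByInfo(info);
+ * }</pre>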
+ */
+public class VectorGroupByInfo {
+
+  private static final long serialVersionUID = 1L;
+
+  public static enum HashTableKeyType {
+    NONE,
+    LONG,
+    STRING,
+    SERIALIZE
+  }
+
+  //------------------------------------------------------------------------------------------------
+
+  public static enum AggregationVariation {
+    NONE,
+    HASH_SINGLE_COUNT,
+    HASH_DUPLICATE_REDUCTION
+  }
+
+  public static class SingleCountAggregation {
+
+    public enum SingleCountAggregationKind {
+      NONE,
+      COUNT_STAR,
+      COUNT_KEY,
+      COUNT_COLUMN
+    }
+
+    private final SingleCountAggregationKind singleCountAggregationKind;
+    private final int countColumnNum;
+
+    public SingleCountAggregation(SingleCountAggregationKind singleCountAggregationKind) {
+      this.singleCountAggregationKind = singleCountAggregationKind;
+      countColumnNum = -1;
+    }
+
+    public SingleCountAggregation(SingleCountAggregationKind singleCountAggregationKind,
+        int countColumnNum) {
+      this.singleCountAggregationKind = singleCountAggregationKind;
+      this.countColumnNum = countColumnNum;
+    }
+
+    public SingleCountAggregationKind getSingleCountAggregationKind() {
+      return singleCountAggregationKind;
+    }
+
+    public int getCountColumnNum() {
+      return countColumnNum;
+    }
+  }
+
+  //---------------------------------------------------------------------------
+
+  private boolean isVectorizationGroupByNativeEnabled;
+  private String engine;
+
+  // Temporary restrictions...
+  private boolean isSingleColumnKey;
+
+  private List<String> vectorizationIssueList;
+
+  private AggregationVariation aggregationVariation;
+  private SingleCountAggregation singleCountAggregation;
+
+  private HashTableKeyType hashTableKeyType;
+
+  private int testGroupByMaxMemoryAvailable;
+
+  public VectorGroupByInfo() {
+    isVectorizationGroupByNativeEnabled = false;
+
+    isSingleColumnKey = false;
+
+    vectorizationIssueList = null;
+
+    hashTableKeyType = HashTableKeyType.NONE;
+
+    testGroupByMaxMemoryAvailable = -1;
+  }
+
+  public boolean getIsVectorizationGroupByNativeEnabled() {
+    return isVectorizationGroupByNativeEnabled;
+  }
+
+  public void setIsVectorizationGroupByNativeEnabled(boolean isVectorizationGroupByNativeEnabled) {
+    this.isVectorizationGroupByNativeEnabled = isVectorizationGroupByNativeEnabled;
+  }
+
+  public String getEngine() {
+    return engine;
+  }
+
+  public void setEngine(String engine) {
+    this.engine = engine;
+  }
+
+  public boolean getIsSingleKeyColumn() {
+    return isSingleColumnKey;
+  }
+
+  public void setIsSingleKeyColumn(boolean isSingleColumnKey) {
+    this.isSingleColumnKey = isSingleColumnKey;
+  }
+
+  public List<String> getVectorizationIssueList() {
+    return vectorizationIssueList;
+  }
+
+  public void setVectorizationIssueList(List<String> vectorizationIssueList) {
+    this.vectorizationIssueList = vectorizationIssueList;
+  }
+
+  public void setAggregationVariation(AggregationVariation aggregationVariation) {
+    this.aggregationVariation = aggregationVariation;
+  }
+
+  public AggregationVariation getAggregationVariation() {
+    return aggregationVariation;
+  }
+
+  public void setSingleCountAggregation(SingleCountAggregation singleCountAggregation) {
+    this.singleCountAggregation = singleCountAggregation;
+  }
+
+  public SingleCountAggregation getSingleCountAggregation() {
+    return singleCountAggregation;
+  }
+
+  public HashTableKeyType getHashTableKeyType() {
+    return hashTableKeyType;
+  }
+
+  public void setHashTableKeyType(HashTableKeyType hashTableKeyType) {
+    this.hashTableKeyType = hashTableKeyType;
+  }
+
+  public int getTestGroupByMaxMemoryAvailable() {
+    return testGroupByMaxMemoryAvailable;
+  }
+
+  public void setTestGroupByMaxMemoryAvailable(int testGroupByMaxMemoryAvailable) {
+    this.testGroupByMaxMemoryAvailable = testGroupByMaxMemoryAvailable;
+  }
+}
diff --git ql/src/test/queries/clientpositive/vector_count_simple.q ql/src/test/queries/clientpositive/vector_count_simple.q
new file mode 100644
index 0000000..bdcf023
--- /dev/null
+++ ql/src/test/queries/clientpositive/vector_count_simple.q
@@ -0,0 +1,485 @@
+set hive.mapred.mode=nonstrict;
+set hive.explain.user=false;
+SET hive.vectorized.execution.enabled=true;
+set hive.fetch.task.conversion=none;
+set hive.vectorized.execution.groupby.native.enabled=true;
+-- We want to create selectedInUse batches with WHERE expressions.
+SET hive.optimize.ppd=false;
+
+-- SORT_QUERY_RESULTS
+
+
+
+CREATE TABLE groupby_long_1a_txt(key bigint)
+row format delimited fields terminated by ',';
+LOAD DATA LOCAL INPATH '../../data/files/groupby_long_1a.txt' OVERWRITE INTO TABLE groupby_long_1a_txt;
+CREATE TABLE groupby_long_1a STORED AS ORC AS SELECT * FROM groupby_long_1a_txt;
+
+-- Add a single NULL row that will come from ORC as isRepeated.
+insert into groupby_long_1a values (NULL);
+
+-- And, a single non-NULL key that is already in the table and one that isn't, as rows that
+-- will also come from ORC as isRepeated.
+insert into groupby_long_1a values (-5206670856103795573);
+insert into groupby_long_1a values (800);
+
+CREATE TABLE groupby_long_1a_nonull_txt(key bigint)
+row format delimited fields terminated by ',';
+LOAD DATA LOCAL INPATH '../../data/files/groupby_long_1a_nonull.txt' OVERWRITE INTO TABLE groupby_long_1a_nonull_txt;
+CREATE TABLE groupby_long_1a_nonull STORED AS ORC AS SELECT * FROM groupby_long_1a_nonull_txt;
+
+insert into groupby_long_1a_nonull values (-6187919478609154811);
+insert into groupby_long_1a_nonull values (1000);
+
+
+
+CREATE TABLE groupby_long_1b_txt(key smallint)
+row format delimited fields terminated by ',';
+LOAD DATA LOCAL INPATH '../../data/files/groupby_long_1b.txt' OVERWRITE INTO TABLE groupby_long_1b_txt;
+CREATE TABLE groupby_long_1b STORED AS ORC AS SELECT * FROM groupby_long_1b_txt;
+
+insert into groupby_long_1b values (NULL);
+
+insert into groupby_long_1b values (32030);
+insert into groupby_long_1b values (800);
+
+CREATE TABLE groupby_long_1b_nonull_txt(key smallint)
+row format delimited fields terminated by ',';
+LOAD DATA LOCAL INPATH '../../data/files/groupby_long_1b_nonull.txt' OVERWRITE INTO TABLE groupby_long_1b_nonull_txt;
+CREATE TABLE groupby_long_1b_nonull STORED AS ORC AS SELECT * FROM groupby_long_1b_nonull_txt;
+
+insert into groupby_long_1b_nonull values (31713);
+insert into groupby_long_1b_nonull values (34);
+
+
+
+CREATE TABLE groupby_long_1c_txt(key int, b_string string)
+row format delimited fields terminated by ',';
+LOAD DATA LOCAL INPATH '../../data/files/groupby_long_1c.txt' OVERWRITE INTO TABLE groupby_long_1c_txt;
+CREATE TABLE groupby_long_1c STORED AS ORC AS SELECT * FROM groupby_long_1c_txt;
+
+insert into groupby_long_1c values (NULL, NULL);
+insert into groupby_long_1c values (NULL, 'TKTKGVGFW');
+insert into groupby_long_1c values (NULL, 'NEW');
+
+CREATE TABLE groupby_long_1c_nonull_txt(key int, b_string string)
+row format delimited fields terminated by ',';
+LOAD DATA LOCAL INPATH '../../data/files/groupby_long_1c_nonull.txt' OVERWRITE INTO TABLE groupby_long_1c_nonull_txt;
+CREATE TABLE groupby_long_1c_nonull STORED AS ORC AS SELECT * FROM groupby_long_1c_nonull_txt;
+
+insert into groupby_long_1c_nonull values (1928928239, NULL);
+insert into groupby_long_1c_nonull values (9999, 'NEW');
+
+
+
+-- *_long_1a
+
+-- COUNT_KEY
+explain vectorization operator
+select key, count(key) from groupby_long_1a group by key;
+select key, count(key) from groupby_long_1a group by key;
+select key, count(key) from groupby_long_1a where key != -8460550397108077433 group by key;
+
+-- COUNT_STAR
+explain vectorization operator
+select key, count(*) from groupby_long_1a group by key;
+select key, count(*) from groupby_long_1a group by key;
+select key, count(*) from groupby_long_1a where key != -8460550397108077433 group by key;
+
+-- *_long_1a_nonull
+
+-- COUNT_KEY
+select key, count(key) from groupby_long_1a_nonull group by key;
+select key, count(key) from groupby_long_1a_nonull where key != 1569543799237464101 group by key;
+
+-- COUNT_STAR
+select key, count(*) from groupby_long_1a_nonull group by key;
+select key, count(*) from groupby_long_1a_nonull where key != 1569543799237464101 group by key;
+
+-- *_long_1b
+
+-- COUNT_KEY
+explain vectorization operator
+select key, count(key) from groupby_long_1b group by key;
+select key, count(key) from groupby_long_1b group by key;
+select key, count(key) from groupby_long_1b where key != 32030 group by key;
+
+-- COUNT_STAR
+explain vectorization operator
+select key, count(*) from groupby_long_1b group by key;
+select key, count(*) from groupby_long_1b group by key;
+select key, count(*) from groupby_long_1b where key != 32030 group by key;
+
+-- *_long_1b_nonull
+
+-- COUNT_KEY
+select key, count(key) from groupby_long_1b_nonull group by key;
+select key, count(key) from groupby_long_1b_nonull where key != 32030 group by key;
+
+-- COUNT_STAR
+select key, count(*) from groupby_long_1b_nonull group by key;
+select key, count(*) from groupby_long_1b_nonull where key != 32030 group by key;
+
+-- *_long_1c
+
+-- COUNT_KEY
+explain vectorization operator
+select key, count(key) from groupby_long_1c group by key;
+select key, count(key) from groupby_long_1c group by key;
+select key, count(key) from groupby_long_1c where key != -1437463633 group by key;
+
+-- COUNT_STAR
+explain vectorization operator
+select key, count(*) from groupby_long_1c group by key;
+select key, count(*) from groupby_long_1c group by key;
+select key, count(*) from groupby_long_1c where key != -1437463633 group by key;
+
+-- COUNT_COLUMN
+explain vectorization operator
+select key, count(b_string) from groupby_long_1c group by key;
+select key, count(b_string) from groupby_long_1c group by key;
+select key, count(b_string) from groupby_long_1c where key != -1437463633 group by key;
+
+-- *_long_1c_nonull
+
+-- COUNT_KEY
+select key, count(key) from groupby_long_1c_nonull group by key;
+select key, count(key) from groupby_long_1c_nonull where key != -1437463633 group by key;
+
+-- COUNT_STAR
+select key, count(*) from groupby_long_1c_nonull group by key;
+select key, count(*) from groupby_long_1c_nonull where key != -1437463633 group by key;
+
+-- COUNT_COLUMN
+select key, count(b_string) from groupby_long_1c_nonull group by key;
+select key, count(b_string) from groupby_long_1c_nonull where key != -1437463633 group by key;
+
+
+
+CREATE TABLE groupby_string_1a_txt(key string)
+row format delimited fields terminated by ',';
+LOAD DATA LOCAL INPATH '../../data/files/groupby_string_1a.txt' OVERWRITE INTO TABLE groupby_string_1a_txt;
+CREATE TABLE groupby_string_1a STORED AS ORC AS SELECT * FROM groupby_string_1a_txt;
+
+-- Add a single NULL row that will come from ORC as isRepeated.
+insert into groupby_string_1a values (NULL);
+
+-- And, a single non-NULL key that is already in the table and one that isn't, as rows that
+-- will also come from ORC as isRepeated.
+insert into groupby_string_1a values ('QNCYBDW');
+insert into groupby_string_1a values ('NOT');
+
+CREATE TABLE groupby_string_1a_nonull_txt(key string)
+row format delimited fields terminated by ',';
+LOAD DATA LOCAL INPATH '../../data/files/groupby_string_1a_nonull.txt' OVERWRITE INTO TABLE groupby_string_1a_nonull_txt;
+CREATE TABLE groupby_string_1a_nonull STORED AS ORC AS SELECT * FROM groupby_string_1a_nonull_txt;
+
+insert into groupby_string_1a_nonull values ('PXLD');
+insert into groupby_string_1a_nonull values ('AA');
+
+-- Use same data as 1a.
+CREATE TABLE groupby_string_1b_txt(key char(4))
+row format delimited fields terminated by ',';
+LOAD DATA LOCAL INPATH '../../data/files/groupby_string_1a.txt' OVERWRITE INTO TABLE groupby_string_1b_txt;
+CREATE TABLE groupby_string_1b STORED AS ORC AS SELECT * FROM groupby_string_1b_txt;
+
+insert into groupby_string_1b values (NULL);
+
+insert into groupby_string_1b values ('QNCYBDW');
+insert into groupby_string_1b values ('NOT');
+
+CREATE TABLE groupby_string_1b_nonull_txt(key char(4))
+row format delimited fields terminated by ',';
+LOAD DATA LOCAL INPATH '../../data/files/groupby_string_1a_nonull.txt' OVERWRITE INTO TABLE groupby_string_1b_nonull_txt;
+CREATE TABLE groupby_string_1b_nonull STORED AS ORC AS SELECT * FROM groupby_string_1b_nonull_txt;
+
+insert into groupby_string_1b_nonull values ('PXLD');
+insert into groupby_string_1b_nonull values ('AA');
+
+CREATE TABLE groupby_string_1c_txt(key string, s_date date, s_timestamp timestamp)
+row format delimited fields terminated by ',';
+LOAD DATA LOCAL INPATH '../../data/files/groupby_string_1c.txt' OVERWRITE INTO TABLE groupby_string_1c_txt;
+CREATE TABLE groupby_string_1c STORED AS ORC AS SELECT * FROM groupby_string_1c_txt;
+
+insert into groupby_string_1c values (NULL, NULL, NULL);
+insert into groupby_string_1c values (NULL, '2141-02-19', '2092-06-07 06:42:30.000538454');
+insert into groupby_string_1c values (NULL, '2018-04-11', NULL);
+
+insert into groupby_string_1c values ('ATZJTPECF', NULL, NULL);
+insert into groupby_string_1c values ('ATZJTPECF', '2144-01-13', '2092-06-07 06:42:30.000538454');
+insert into groupby_string_1c values ('ATZJTPECF', '1988-04-23', NULL);
+
+insert into groupby_string_1c values ('BB', NULL, NULL);
+insert into groupby_string_1c values ('CC', '2018-04-12', '2092-06-07 06:42:30.000538454');
+insert into groupby_string_1c values ('DD', '2018-04-14', NULL);
+
+CREATE TABLE groupby_string_1c_nonull_txt(key string, s_date date, s_timestamp timestamp)
+row format delimited fields terminated by ',';
+LOAD DATA LOCAL INPATH '../../data/files/groupby_string_1c_nonull.txt' OVERWRITE INTO TABLE groupby_string_1c_nonull_txt;
+CREATE TABLE groupby_string_1c_nonull STORED AS ORC AS SELECT * FROM groupby_string_1c_nonull_txt;
+
+insert into groupby_string_1c_nonull values ('SDA', NULL, NULL);
+insert into groupby_string_1c_nonull values ('SDA', '2144-01-13', '2092-06-07 06:42:30.000538454');
+insert into groupby_string_1c_nonull values ('SDA', '1988-04-23', NULL);
+
+insert into groupby_string_1c_nonull values ('EEE', NULL, NULL);
+insert into groupby_string_1c_nonull values ('FFF', '880-11-01', '2073-03-21 15:32:57.617920888');
+insert into groupby_string_1c_nonull values ('GGG', '2018-04-15', NULL);
+
+-- *_string_1a
+
+-- COUNT_KEY
+explain vectorization operator
+select key, count(key) from groupby_string_1a group by key;
+select key, count(key) from groupby_string_1a group by key;
+select key, count(key) from groupby_string_1a where key != 'PXLD' group by key;
+
+-- COUNT_STAR
+explain vectorization operator
+select key, count(*) from groupby_string_1a group by key;
+select key, count(*) from groupby_string_1a group by key;
+select key, count(*) from groupby_string_1a where key != 'PXLD' group by key;
+
+-- *_string_1a_nonull
+
+-- COUNT_KEY
+select key, count(key) from groupby_string_1a_nonull group by key;
+select key, count(key) from groupby_string_1a_nonull where key != 'MXGDMBD' group by key;
+
+-- COUNT_STAR
+select key, count(*) from groupby_string_1a_nonull group by key;
+select key, count(*) from groupby_string_1a_nonull where key != 'MXGDMBD' group by key;
+
+-- *_string_1b
+
+-- COUNT_KEY
+explain vectorization operator
+select key, count(key) from groupby_string_1b group by key;
+select key, count(key) from groupby_string_1b group by key;
+select key, count(key) from groupby_string_1b where key != 'MXGD' group by key;
+
+-- COUNT_STAR
+explain vectorization operator
+select key, count(*) from groupby_string_1b group by key;
+select key, count(*) from groupby_string_1b group by key;
+select key, count(*) from groupby_string_1b where key != 'MXGD' group by key;
+
+-- *_string_1b_nonull
+
+-- COUNT_KEY
+select key, count(key) from groupby_string_1b_nonull group by key;
+select key, count(key) from groupby_string_1b_nonull where key != 'MXGD' group by key;
+
+-- COUNT_STAR
+select key, count(*) from groupby_string_1b_nonull group by key;
+select key, count(*) from groupby_string_1b_nonull where key != 'MXGD' group by key;
+
+-- *_string_1c
+
+-- COUNT_KEY
+explain vectorization operator
+select key, count(key) from groupby_string_1c group by key;
+select key, count(key) from groupby_string_1c group by key;
+select key, count(key) from groupby_string_1c where key != 'IWEZJHKE' group by key;
+
+-- COUNT_STAR
+explain vectorization operator
+select key, count(*) from groupby_string_1c group by key;
+select key, count(*) from groupby_string_1c group by key;
+select key, count(*) from groupby_string_1c where key != 'IWEZJHKE' group by key;
+
+-- COUNT_COLUMN s_date
+explain vectorization operator
+select key, count(s_date) from groupby_string_1c group by key;
+select key, count(s_date) from groupby_string_1c group by key;
+select key, count(s_date) from groupby_string_1c where key != 'IWEZJHKE' group by key;
+
+-- COUNT_COLUMN s_timestamp
+explain vectorization operator
+select key, count(s_timestamp) from groupby_string_1c group by key;
+select key, count(s_timestamp) from groupby_string_1c group by key;
+select key, count(s_timestamp) from groupby_string_1c where key != 'IWEZJHKE' group by key;
+
+-- *_string_1c_nonull
+
+-- COUNT_KEY
+select key, count(key) from groupby_string_1c_nonull group by key;
+select key, count(key) from groupby_string_1c_nonull where key != 'IWEZJHKE' group by key;
+
+-- COUNT_STAR
+select key, count(*) from groupby_string_1c_nonull group by key;
+select key, count(*) from groupby_string_1c_nonull where key != 'IWEZJHKE' group by key;
+
+-- COUNT_COLUMN s_date
+select key, count(s_date) from groupby_string_1c_nonull group by key;
+select key, count(s_date) from groupby_string_1c_nonull where key != 'IWEZJHKE' group by key;
+
+-- COUNT_COLUMN s_timestamp
+select key, count(s_timestamp) from groupby_string_1c_nonull group by key;
+select key, count(s_timestamp) from groupby_string_1c_nonull where key != 'IWEZJHKE' group by key;
+
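+-- Timestamp keys (and, in the over10k queries further down, decimal keys) are neither a
+-- single LONG nor a single STRING column, so the serialize tables below exercise the
+-- SERIALIZE hash table key type. As a hypothetical cross-check (not executed by this test),
+-- the same results could be compared against the existing non-native VectorGroupByOperator
+-- path by forcing native GroupBy off:
+--
+--   set hive.test.vectorized.execution.groupby.native.override=disable;
+--   select key, count(key) from groupby_serialize_1a group by key;
+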
+
+
+
+CREATE TABLE groupby_serialize_1a_txt(key timestamp)
+row format delimited fields terminated by ',';
+LOAD DATA LOCAL INPATH '../../data/files/groupby_serialize_1a.txt' OVERWRITE INTO TABLE groupby_serialize_1a_txt;
+CREATE TABLE groupby_serialize_1a STORED AS ORC AS SELECT * FROM groupby_serialize_1a_txt;
+
+CREATE TABLE groupby_serialize_1a_nonull_txt(key timestamp)
+row format delimited fields terminated by ',';
+LOAD DATA LOCAL INPATH '../../data/files/groupby_serialize_1a_nonull.txt' OVERWRITE INTO TABLE groupby_serialize_1a_nonull_txt;
+CREATE TABLE groupby_serialize_1a_nonull STORED AS ORC AS SELECT * FROM groupby_serialize_1a_nonull_txt;
+
+
+CREATE TABLE groupby_serialize_1b_txt(key timestamp, c_smallint smallint, c_string string, c_double double)
+row format delimited fields terminated by ',';
+LOAD DATA LOCAL INPATH '../../data/files/groupby_serialize_1b.txt' OVERWRITE INTO TABLE groupby_serialize_1b_txt;
+CREATE TABLE groupby_serialize_1b STORED AS ORC AS SELECT * FROM groupby_serialize_1b_txt;
+
+CREATE TABLE groupby_serialize_1b_nonull_txt(key timestamp, c_smallint smallint, c_string string, c_double double)
+row format delimited fields terminated by ',';
+LOAD DATA LOCAL INPATH '../../data/files/groupby_serialize_1b_nonull.txt' OVERWRITE INTO TABLE groupby_serialize_1b_nonull_txt;
+CREATE TABLE groupby_serialize_1b_nonull STORED AS ORC AS SELECT * FROM groupby_serialize_1b_nonull_txt;
+
+
+-- *_serialize_1a
+
+-- COUNT_KEY
+explain vectorization operator
+select key, count(key) from groupby_serialize_1a group by key;
+select key, count(key) from groupby_serialize_1a group by key;
+select key, count(key) from groupby_serialize_1a where key != '2082-07-14 04:00:40.695380469' group by key;
+
+-- COUNT_STAR
+explain vectorization operator
+select key, count(*) from groupby_serialize_1a group by key;
+select key, count(*) from groupby_serialize_1a group by key;
+select key, count(*) from groupby_serialize_1a where key != '2082-07-14 04:00:40.695380469' group by key;
+
+-- *_serialize_1a_nonull
+
+-- COUNT_KEY
+select key, count(key) from groupby_serialize_1a_nonull group by key;
+select key, count(key) from groupby_serialize_1a_nonull where key != '2082-07-14 04:00:40.695380469' group by key;
+
+-- COUNT_STAR
+select key, count(*) from groupby_serialize_1a_nonull group by key;
+select key, count(*) from groupby_serialize_1a_nonull where key != '2082-07-14 04:00:40.695380469' group by key;
+
+
+-- *_serialize_1b
+
+-- COUNT_KEY
+explain vectorization operator
+select key, count(key) from groupby_serialize_1b group by key;
+select key, count(key) from groupby_serialize_1b group by key;
+select key, count(key) from groupby_serialize_1b where key != '2083-06-07 09:35:19.383' group by key;
+
+-- COUNT_STAR
+explain vectorization operator
+select key, count(*) from groupby_serialize_1b group by key;
+select key, count(*) from groupby_serialize_1b group by key;
+select key, count(*) from groupby_serialize_1b where key != '2083-06-07 09:35:19.383' group by key;
+
+-- COUNT_COLUMN c_smallint
+explain vectorization operator
+select key, count(c_smallint) from groupby_serialize_1b group by key;
+select key, count(c_smallint) from groupby_serialize_1b group by key;
+select key, count(c_smallint) from groupby_serialize_1b where key != '2083-06-07 09:35:19.383' group by key;
+
+-- COUNT_COLUMN c_string
+explain vectorization operator
+select key, count(c_string) from groupby_serialize_1b group by key;
+select key, count(c_string) from groupby_serialize_1b group by key;
+select key, count(c_string) from groupby_serialize_1b where key != '2083-06-07 09:35:19.383' group by key;
+
+-- *_serialize_1b_nonull
+
+-- COUNT_KEY
+select key, count(key) from groupby_serialize_1b_nonull group by key;
+select key, count(key) from groupby_serialize_1b_nonull where key != '2083-06-07 09:35:19.383' group by key;
+
+-- COUNT_STAR
+select key, count(*) from groupby_serialize_1b_nonull group by key;
+select key, count(*) from groupby_serialize_1b_nonull where key != '2083-06-07 09:35:19.383' group by key;
+
+-- COUNT_COLUMN c_smallint
+select key, count(c_smallint) from groupby_serialize_1b_nonull group by key;
+select key, count(c_smallint) from groupby_serialize_1b_nonull where key != '2083-06-07 09:35:19.383' group by key;
+
+-- COUNT_COLUMN c_string
+select key, count(c_string) from groupby_serialize_1b_nonull group by key;
+select key, count(c_string) from groupby_serialize_1b_nonull where key != '2083-06-07 09:35:19.383' group by key;
+
+------------------------------------------------------------------------------------------
+
+CREATE TABLE over10k(t tinyint,
+           si smallint,
+           i int,
+           b bigint,
+           f float,
+           d double,
+           bo boolean,
+           s string,
+           ts timestamp,
+           `dec` decimal(4,2),
+           bin binary)
+ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
+STORED AS TEXTFILE;
+
+LOAD DATA LOCAL INPATH '../../data/files/over10k' OVERWRITE INTO TABLE over10k;
+
+-- STRING
+explain vectorization operator
+select s, count(s) from over10k group by s order by s limit 10;
+select s, count(s) from over10k group by s order by s limit 10;
+
+explain vectorization operator
+select s, count(ts) from over10k group by s order by s limit 10;
+select s, count(ts) from over10k group by s order by s limit 10;
+
+explain vectorization operator
+select s, count(*) from over10k group by s order by s limit 10;
+select s, count(*) from over10k group by s order by s limit 10;
+
+-- SERIALIZE TIMESTAMP
+explain vectorization operator
+select ts, count(ts) from over10k group by ts order by ts limit 10;
+select ts, count(ts) from over10k group by ts order by ts limit 10;
+
+explain vectorization operator
+select ts, count(d) from over10k group by ts order by ts limit 10;
+select ts, count(d) from over10k group by ts order by ts limit 10;
+
+explain vectorization operator
+select ts, count(*) from over10k group by ts order by ts limit 10;
+select ts, count(*) from over10k group by ts order by ts limit 10;
+
+-- SERIALIZE DECIMAL
+explain vectorization operator
+select `dec`, count(`dec`) from over10k group by `dec` order by `dec` limit 10;
+select `dec`, count(`dec`) from over10k group by `dec` order by `dec` limit 10;
+
+explain vectorization operator
+select `dec`, count(bin) from over10k group by `dec` order by `dec` limit 10;
+select `dec`, count(bin) from over10k group by `dec` order by `dec` limit 10;
+
+explain vectorization operator
+select `dec`, count(*) from over10k group by `dec` order by `dec` limit 10;
+select `dec`, count(*) from over10k group by `dec` order by `dec` limit 10;
+
+
+-- Shrink the memory available to the native hash table (see the description of
+-- hive.test.vectorized.groupby.native.max.memory.available in HiveConf) to exercise more of
+-- the flush logic.
+set hive.test.vectorized.groupby.native.max.memory.available=1024;
+
+explain vectorization operator
+select i, count(i) from over10k group by i order by i limit 10;
+select i, count(i) from over10k group by i order by i limit 10;
+
+explain vectorization operator
+select i, count(b) from over10k group by i order by i limit 10;
+select i, count(b) from over10k group by i order by i limit 10;
+
+explain vectorization operator
+select i, count(*) from over10k group by i order by i limit 10;
+select i, count(*) from over10k group
by i order by i limit 10; \ No newline at end of file diff --git ql/src/test/results/clientpositive/llap/acid_no_buckets.q.out ql/src/test/results/clientpositive/llap/acid_no_buckets.q.out index 89b7169..e6e2177 100644 --- ql/src/test/results/clientpositive/llap/acid_no_buckets.q.out +++ ql/src/test/results/clientpositive/llap/acid_no_buckets.q.out @@ -1497,6 +1497,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 2:string, col 3:string, col 0:string, col 1:string native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true + nativeConditionsNotMet: One Long Key IS false vectorProcessingMode: HASH projectedOutputColumnNums: [] Reduce Sink Vectorization: @@ -1594,6 +1596,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 0:string native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true + nativeConditionsNotMet: One Long Key IS false vectorProcessingMode: HASH projectedOutputColumnNums: [] App Master Event Vectorization: @@ -1608,6 +1612,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 1:string native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true + nativeConditionsNotMet: One Long Key IS false vectorProcessingMode: HASH projectedOutputColumnNums: [] App Master Event Vectorization: @@ -2284,6 +2290,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 2:string, col 3:string, col 0:string, col 1:string native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true + nativeConditionsNotMet: One Long Key IS false vectorProcessingMode: HASH projectedOutputColumnNums: [] Reduce Sink Vectorization: @@ -2381,6 +2389,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 0:string native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true + nativeConditionsNotMet: One Long Key IS false vectorProcessingMode: HASH projectedOutputColumnNums: [] App Master Event Vectorization: @@ -2395,6 +2405,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 1:string native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true + nativeConditionsNotMet: One Long Key IS false vectorProcessingMode: HASH projectedOutputColumnNums: [] App Master Event Vectorization: diff --git ql/src/test/results/clientpositive/llap/llap_partitioned.q.out ql/src/test/results/clientpositive/llap/llap_partitioned.q.out index 799062e..c0fd24d 100644 --- ql/src/test/results/clientpositive/llap/llap_partitioned.q.out +++ ql/src/test/results/clientpositive/llap/llap_partitioned.q.out @@ -1721,10 +1721,11 @@ STAGE PLANS: Statistics: Num rows: 10 Data size: 40 Basic stats: COMPLETE Column stats: COMPLETE Group By 
Operator Group By Vectorization: - className: VectorGroupByOperator + className: VectorGroupByHashLongKeyDuplicateReductionOperator groupByMode: HASH keyExpressions: col 10:tinyint - native: false + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One Long Key IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: _col0 (type: tinyint) @@ -1748,7 +1749,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true rowBatchContext: @@ -2106,6 +2107,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true + nativeConditionsNotMet: One Long Key IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] mode: hash diff --git ql/src/test/results/clientpositive/llap/llap_vector_nohybridgrace.q.out ql/src/test/results/clientpositive/llap/llap_vector_nohybridgrace.q.out index 2c13d5d..e9519c2 100644 --- ql/src/test/results/clientpositive/llap/llap_vector_nohybridgrace.q.out +++ ql/src/test/results/clientpositive/llap/llap_vector_nohybridgrace.q.out @@ -73,6 +73,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true + nativeConditionsNotMet: One Long Key IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] mode: hash @@ -252,6 +254,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true + nativeConditionsNotMet: One Long Key IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] mode: hash diff --git ql/src/test/results/clientpositive/llap/vector_aggregate_9.q.out ql/src/test/results/clientpositive/llap/vector_aggregate_9.q.out index 1e090f0..29f3282 100644 --- ql/src/test/results/clientpositive/llap/vector_aggregate_9.q.out +++ ql/src/test/results/clientpositive/llap/vector_aggregate_9.q.out @@ -146,6 +146,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3] mode: hash @@ -286,6 +288,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT 
aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3] mode: hash @@ -426,6 +430,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3] mode: hash diff --git ql/src/test/results/clientpositive/llap/vector_aggregate_without_gby.q.out ql/src/test/results/clientpositive/llap/vector_aggregate_without_gby.q.out index c99ac8d..5fcc875 100644 --- ql/src/test/results/clientpositive/llap/vector_aggregate_without_gby.q.out +++ ql/src/test/results/clientpositive/llap/vector_aggregate_without_gby.q.out @@ -85,6 +85,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1] mode: hash diff --git ql/src/test/results/clientpositive/llap/vector_auto_smb_mapjoin_14.q.out ql/src/test/results/clientpositive/llap/vector_auto_smb_mapjoin_14.q.out index 54216fa..51481e9 100644 --- ql/src/test/results/clientpositive/llap/vector_auto_smb_mapjoin_14.q.out +++ ql/src/test/results/clientpositive/llap/vector_auto_smb_mapjoin_14.q.out @@ -270,6 +270,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] mode: hash diff --git ql/src/test/results/clientpositive/llap/vector_between_in.q.out ql/src/test/results/clientpositive/llap/vector_between_in.q.out index 6093beb..8f88e8c 100644 --- ql/src/test/results/clientpositive/llap/vector_between_in.q.out +++ ql/src/test/results/clientpositive/llap/vector_between_in.q.out @@ -166,6 +166,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] mode: hash @@ -370,6 +372,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] mode: hash @@ -764,6 +768,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + 
nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] mode: hash @@ -1111,11 +1117,11 @@ STAGE PLANS: Group By Operator aggregations: count() Group By Vectorization: - aggregators: VectorUDAFCountStar(*) -> bigint - className: VectorGroupByOperator + className: VectorGroupByHashLongKeySingleCountStarOperator groupByMode: HASH keyExpressions: col 5:boolean - native: false + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true vectorProcessingMode: HASH projectedOutputColumnNums: [0] keys: _col0 (type: boolean) @@ -1140,7 +1146,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true Reducer 2 @@ -1249,11 +1255,11 @@ STAGE PLANS: Group By Operator aggregations: count() Group By Vectorization: - aggregators: VectorUDAFCountStar(*) -> bigint - className: VectorGroupByOperator + className: VectorGroupByHashLongKeySingleCountStarOperator groupByMode: HASH keyExpressions: col 5:boolean - native: false + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true vectorProcessingMode: HASH projectedOutputColumnNums: [0] keys: _col0 (type: boolean) @@ -1278,7 +1284,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true Reducer 2 @@ -1387,11 +1393,11 @@ STAGE PLANS: Group By Operator aggregations: count() Group By Vectorization: - aggregators: VectorUDAFCountStar(*) -> bigint - className: VectorGroupByOperator + className: VectorGroupByHashLongKeySingleCountStarOperator groupByMode: HASH keyExpressions: col 5:boolean - native: false + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true vectorProcessingMode: HASH projectedOutputColumnNums: [0] keys: _col0 (type: boolean) @@ -1416,7 +1422,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: true vectorized: true Reducer 2 @@ -1525,11 +1531,11 @@ STAGE PLANS: Group By Operator aggregations: count() Group By Vectorization: - aggregators: VectorUDAFCountStar(*) -> bigint - className: VectorGroupByOperator + className: VectorGroupByHashLongKeySingleCountStarOperator groupByMode: HASH keyExpressions: col 5:boolean - native: false + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, 
hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true vectorProcessingMode: HASH projectedOutputColumnNums: [0] keys: _col0 (type: boolean) @@ -1554,7 +1560,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: true vectorized: true Reducer 2 diff --git ql/src/test/results/clientpositive/llap/vector_binary_join_groupby.q.out ql/src/test/results/clientpositive/llap/vector_binary_join_groupby.q.out index aabfc73..0aac152 100644 --- ql/src/test/results/clientpositive/llap/vector_binary_join_groupby.q.out +++ ql/src/test/results/clientpositive/llap/vector_binary_join_groupby.q.out @@ -175,6 +175,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] mode: hash @@ -365,11 +367,11 @@ STAGE PLANS: Group By Operator aggregations: count() Group By Vectorization: - aggregators: VectorUDAFCountStar(*) -> bigint - className: VectorGroupByOperator + className: VectorGroupByHashStringKeySingleCountStarOperator groupByMode: HASH keyExpressions: col 10:binary - native: false + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true vectorProcessingMode: HASH projectedOutputColumnNums: [0] keys: bin (type: binary) @@ -394,7 +396,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true Reducer 2 diff --git ql/src/test/results/clientpositive/llap/vector_cast_constant.q.out ql/src/test/results/clientpositive/llap/vector_cast_constant.q.out index 861ae9a..3764516 100644 --- ql/src/test/results/clientpositive/llap/vector_cast_constant.q.out +++ ql/src/test/results/clientpositive/llap/vector_cast_constant.q.out @@ -149,6 +149,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 2:int native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3, 4, 5] keys: _col0 (type: int) diff --git ql/src/test/results/clientpositive/llap/vector_char_2.q.out ql/src/test/results/clientpositive/llap/vector_char_2.q.out index 9a43659..0ded352 100644 --- ql/src/test/results/clientpositive/llap/vector_char_2.q.out +++ ql/src/test/results/clientpositive/llap/vector_char_2.q.out @@ -110,6 +110,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 1:char(20) native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] 
IS true, Single Key Column IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+                        nativeConditionsNotMet: Single COUNT aggregation or Duplicate Reduction IS false
                         vectorProcessingMode: HASH
                         projectedOutputColumnNums: [0, 1]
                     keys: _col0 (type: char(20))
@@ -306,6 +308,8 @@ STAGE PLANS:
                         groupByMode: HASH
                         keyExpressions: col 1:char(20)
                         native: false
+                        nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+                        nativeConditionsNotMet: Single COUNT aggregation or Duplicate Reduction IS false
                         vectorProcessingMode: HASH
                         projectedOutputColumnNums: [0, 1]
                     keys: _col0 (type: char(20))
diff --git ql/src/test/results/clientpositive/llap/vector_coalesce_2.q.out ql/src/test/results/clientpositive/llap/vector_coalesce_2.q.out
index e8bb722..30547d3 100644
--- ql/src/test/results/clientpositive/llap/vector_coalesce_2.q.out
+++ ql/src/test/results/clientpositive/llap/vector_coalesce_2.q.out
@@ -78,6 +78,8 @@ STAGE PLANS:
                         groupByMode: HASH
                         keyExpressions: col 1:string
                         native: false
+                        nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+                        nativeConditionsNotMet: Single COUNT aggregation or Duplicate Reduction IS false
                         vectorProcessingMode: HASH
                         projectedOutputColumnNums: [0]
                     keys: _col0 (type: string)
@@ -303,6 +305,8 @@ STAGE PLANS:
                         groupByMode: HASH
                         keyExpressions: col 1:string
                         native: false
+                        nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+                        nativeConditionsNotMet: Single COUNT aggregation or Duplicate Reduction IS false
                         vectorProcessingMode: HASH
                         projectedOutputColumnNums: [0]
                     keys: _col0 (type: string)
diff --git ql/src/test/results/clientpositive/llap/vector_complex_all.q.out ql/src/test/results/clientpositive/llap/vector_complex_all.q.out
index f2277c1..988db52 100644
--- ql/src/test/results/clientpositive/llap/vector_complex_all.q.out
+++ ql/src/test/results/clientpositive/llap/vector_complex_all.q.out
@@ -940,6 +940,8 @@ STAGE PLANS:
                         className: VectorGroupByOperator
                         groupByMode: HASH
                         native: false
+                        nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+                        nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false
                         vectorProcessingMode: HASH
                         projectedOutputColumnNums: [0]
                     mode: hash
@@ -1157,13 +1159,14 @@ STAGE PLANS:
                   Group By Operator
                     aggregations: count(val)
                     Group By Vectorization:
-                        aggregators: VectorUDAFCount(col 4:string) -> bigint
-                        className: VectorGroupByOperator
+                        className: VectorGroupByHashStringKeySingleCountColumnOperator
                         groupByMode: HASH
                         keyExpressions: col 0:string
-                        native: false
+                        native: true
+                        nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
                         vectorProcessingMode: HASH
                         projectedOutputColumnNums: [0]
+                        singleCountAggregation: COUNT_COLUMN
                     keys: str (type: string)
                     mode: hash
                     outputColumnNames: _col0, _col1
@@ -1188,7 +1191,7 @@ STAGE PLANS:
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
                 inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
-                allNative: false
+                allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
                 rowBatchContext:
@@ -1300,13 +1303,14 @@ STAGE PLANS:
                   Group By Operator
                     aggregations: count(_col1)
                     Group By Vectorization:
-                        aggregators: VectorUDAFCount(col 4:string) -> bigint
-                        className: VectorGroupByOperator
+                        className: VectorGroupByHashStringKeySingleCountColumnOperator
                         groupByMode: HASH
                         keyExpressions: col 6:string
-                        native: false
+                        native: true
+                        nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
                         vectorProcessingMode: HASH
                         projectedOutputColumnNums: [0]
+                        singleCountAggregation: COUNT_COLUMN
                     keys: _col0 (type: string)
                     mode: hash
                     outputColumnNames: _col0, _col1
@@ -1331,7 +1335,7 @@ STAGE PLANS:
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
                 inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
-                allNative: false
+                allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
                 rowBatchContext:
diff --git ql/src/test/results/clientpositive/llap/vector_count_distinct.q.out ql/src/test/results/clientpositive/llap/vector_count_distinct.q.out
index 90086ea..eec008a 100644
--- ql/src/test/results/clientpositive/llap/vector_count_distinct.q.out
+++ ql/src/test/results/clientpositive/llap/vector_count_distinct.q.out
@@ -1265,10 +1265,11 @@ STAGE PLANS:
                   Statistics: Num rows: 2000 Data size: 8000 Basic stats: COMPLETE Column stats: COMPLETE
                   Group By Operator
                     Group By Vectorization:
-                        className: VectorGroupByOperator
+                        className: VectorGroupByHashLongKeyDuplicateReductionOperator
                         groupByMode: HASH
                         keyExpressions: col 16:int
-                        native: false
+                        native: true
+                        nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
                         vectorProcessingMode: HASH
                         projectedOutputColumnNums: []
                     keys: ws_order_number (type: int)
@@ -1292,7 +1293,7 @@ STAGE PLANS:
                 inputFormatFeatureSupport: []
                 featureSupportInUse: []
                 inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
-                allNative: false
+                allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
             Reducer 2
@@ -1323,6 +1324,8 @@ STAGE PLANS:
                         className: VectorGroupByOperator
                         groupByMode: HASH
                         native: false
+                        nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+                        nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false
                         vectorProcessingMode: HASH
                         projectedOutputColumnNums: [0]
                     mode: hash
diff --git ql/src/test/results/clientpositive/llap/vector_count_simple.q.out ql/src/test/results/clientpositive/llap/vector_count_simple.q.out
new file mode 100644
index 0000000..bf9fa8d
--- /dev/null
+++ ql/src/test/results/clientpositive/llap/vector_count_simple.q.out
@@ -0,0 +1,7011 @@
+PREHOOK: query: CREATE TABLE groupby_long_1a_txt(key bigint)
+row format delimited fields terminated by ','
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@groupby_long_1a_txt
+POSTHOOK: query: CREATE TABLE
groupby_long_1a_txt(key bigint) +row format delimited fields terminated by ',' +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@groupby_long_1a_txt +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/groupby_long_1a.txt' OVERWRITE INTO TABLE groupby_long_1a_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@groupby_long_1a_txt +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/groupby_long_1a.txt' OVERWRITE INTO TABLE groupby_long_1a_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@groupby_long_1a_txt +PREHOOK: query: CREATE TABLE groupby_long_1a STORED AS ORC AS SELECT * FROM groupby_long_1a_txt +PREHOOK: type: CREATETABLE_AS_SELECT +PREHOOK: Input: default@groupby_long_1a_txt +PREHOOK: Output: database:default +PREHOOK: Output: default@groupby_long_1a +POSTHOOK: query: CREATE TABLE groupby_long_1a STORED AS ORC AS SELECT * FROM groupby_long_1a_txt +POSTHOOK: type: CREATETABLE_AS_SELECT +POSTHOOK: Input: default@groupby_long_1a_txt +POSTHOOK: Output: database:default +POSTHOOK: Output: default@groupby_long_1a +POSTHOOK: Lineage: groupby_long_1a.key SIMPLE [(groupby_long_1a_txt)groupby_long_1a_txt.FieldSchema(name:key, type:bigint, comment:null), ] +PREHOOK: query: insert into groupby_long_1a values (NULL) +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@groupby_long_1a +POSTHOOK: query: insert into groupby_long_1a values (NULL) +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@groupby_long_1a +POSTHOOK: Lineage: groupby_long_1a.key EXPRESSION [] +PREHOOK: query: insert into groupby_long_1a values (-5206670856103795573) +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@groupby_long_1a +POSTHOOK: query: insert into groupby_long_1a values (-5206670856103795573) +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@groupby_long_1a +POSTHOOK: Lineage: groupby_long_1a.key SCRIPT [] +PREHOOK: query: insert into groupby_long_1a values (800) +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@groupby_long_1a +POSTHOOK: query: insert into groupby_long_1a values (800) +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@groupby_long_1a +POSTHOOK: Lineage: groupby_long_1a.key SCRIPT [] +PREHOOK: query: CREATE TABLE groupby_long_1a_nonull_txt(key bigint) +row format delimited fields terminated by ',' +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@groupby_long_1a_nonull_txt +POSTHOOK: query: CREATE TABLE groupby_long_1a_nonull_txt(key bigint) +row format delimited fields terminated by ',' +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@groupby_long_1a_nonull_txt +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/groupby_long_1a_nonull.txt' OVERWRITE INTO TABLE groupby_long_1a_nonull_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@groupby_long_1a_nonull_txt +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/groupby_long_1a_nonull.txt' OVERWRITE INTO TABLE groupby_long_1a_nonull_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@groupby_long_1a_nonull_txt +PREHOOK: query: CREATE TABLE groupby_long_1a_nonull STORED AS ORC AS SELECT * 
+PREHOOK: query: CREATE TABLE groupby_long_1a_nonull_txt(key bigint)
+row format delimited fields terminated by ','
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@groupby_long_1a_nonull_txt
+POSTHOOK: query: CREATE TABLE groupby_long_1a_nonull_txt(key bigint)
+row format delimited fields terminated by ','
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@groupby_long_1a_nonull_txt
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/groupby_long_1a_nonull.txt' OVERWRITE INTO TABLE groupby_long_1a_nonull_txt
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@groupby_long_1a_nonull_txt
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/groupby_long_1a_nonull.txt' OVERWRITE INTO TABLE groupby_long_1a_nonull_txt
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@groupby_long_1a_nonull_txt
+PREHOOK: query: CREATE TABLE groupby_long_1a_nonull STORED AS ORC AS SELECT * FROM groupby_long_1a_nonull_txt
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: default@groupby_long_1a_nonull_txt
+PREHOOK: Output: database:default
+PREHOOK: Output: default@groupby_long_1a_nonull
+POSTHOOK: query: CREATE TABLE groupby_long_1a_nonull STORED AS ORC AS SELECT * FROM groupby_long_1a_nonull_txt
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: default@groupby_long_1a_nonull_txt
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@groupby_long_1a_nonull
+POSTHOOK: Lineage: groupby_long_1a_nonull.key SIMPLE [(groupby_long_1a_nonull_txt)groupby_long_1a_nonull_txt.FieldSchema(name:key, type:bigint, comment:null), ]
+PREHOOK: query: insert into groupby_long_1a_nonull values (-6187919478609154811)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@groupby_long_1a_nonull
+POSTHOOK: query: insert into groupby_long_1a_nonull values (-6187919478609154811)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@groupby_long_1a_nonull
+POSTHOOK: Lineage: groupby_long_1a_nonull.key SCRIPT []
+PREHOOK: query: insert into groupby_long_1a_nonull values (1000)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@groupby_long_1a_nonull
+POSTHOOK: query: insert into groupby_long_1a_nonull values (1000)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@groupby_long_1a_nonull
+POSTHOOK: Lineage: groupby_long_1a_nonull.key SCRIPT []
+PREHOOK: query: CREATE TABLE groupby_long_1b_txt(key smallint)
+row format delimited fields terminated by ','
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@groupby_long_1b_txt
+POSTHOOK: query: CREATE TABLE groupby_long_1b_txt(key smallint)
+row format delimited fields terminated by ','
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@groupby_long_1b_txt
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/groupby_long_1b.txt' OVERWRITE INTO TABLE groupby_long_1b_txt
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@groupby_long_1b_txt
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/groupby_long_1b.txt' OVERWRITE INTO TABLE groupby_long_1b_txt
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@groupby_long_1b_txt
+PREHOOK: query: CREATE TABLE groupby_long_1b STORED AS ORC AS SELECT * FROM groupby_long_1b_txt
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: default@groupby_long_1b_txt
+PREHOOK: Output: database:default
+PREHOOK: Output: default@groupby_long_1b
+POSTHOOK: query: CREATE TABLE groupby_long_1b STORED AS ORC AS SELECT * FROM groupby_long_1b_txt
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: default@groupby_long_1b_txt
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@groupby_long_1b
+POSTHOOK: Lineage: groupby_long_1b.key SIMPLE [(groupby_long_1b_txt)groupby_long_1b_txt.FieldSchema(name:key, type:smallint, comment:null), ]
+PREHOOK: query: insert into groupby_long_1b values (NULL)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@groupby_long_1b
+POSTHOOK: query: insert into groupby_long_1b values (NULL)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@groupby_long_1b
+POSTHOOK: Lineage: groupby_long_1b.key EXPRESSION []
+PREHOOK: query: insert into groupby_long_1b values (32030)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@groupby_long_1b
+POSTHOOK: query: insert into groupby_long_1b values (32030)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@groupby_long_1b
+POSTHOOK: Lineage: groupby_long_1b.key SCRIPT []
+PREHOOK: query: insert into groupby_long_1b values (800)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@groupby_long_1b
+POSTHOOK: query: insert into groupby_long_1b values (800)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@groupby_long_1b
+POSTHOOK: Lineage: groupby_long_1b.key SCRIPT []
+PREHOOK: query: CREATE TABLE groupby_long_1b_nonull_txt(key smallint)
+row format delimited fields terminated by ','
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@groupby_long_1b_nonull_txt
+POSTHOOK: query: CREATE TABLE groupby_long_1b_nonull_txt(key smallint)
+row format delimited fields terminated by ','
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@groupby_long_1b_nonull_txt
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/groupby_long_1b_nonull.txt' OVERWRITE INTO TABLE groupby_long_1b_nonull_txt
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@groupby_long_1b_nonull_txt
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/groupby_long_1b_nonull.txt' OVERWRITE INTO TABLE groupby_long_1b_nonull_txt
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@groupby_long_1b_nonull_txt
+PREHOOK: query: CREATE TABLE groupby_long_1b_nonull STORED AS ORC AS SELECT * FROM groupby_long_1b_nonull_txt
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: default@groupby_long_1b_nonull_txt
+PREHOOK: Output: database:default
+PREHOOK: Output: default@groupby_long_1b_nonull
+POSTHOOK: query: CREATE TABLE groupby_long_1b_nonull STORED AS ORC AS SELECT * FROM groupby_long_1b_nonull_txt
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: default@groupby_long_1b_nonull_txt
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@groupby_long_1b_nonull
+POSTHOOK: Lineage: groupby_long_1b_nonull.key SIMPLE [(groupby_long_1b_nonull_txt)groupby_long_1b_nonull_txt.FieldSchema(name:key, type:smallint, comment:null), ]
+PREHOOK: query: insert into groupby_long_1b_nonull values (31713)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@groupby_long_1b_nonull
+POSTHOOK: query: insert into groupby_long_1b_nonull values (31713)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@groupby_long_1b_nonull
+POSTHOOK: Lineage: groupby_long_1b_nonull.key SCRIPT []
+PREHOOK: query: insert into groupby_long_1b_nonull values (34)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@groupby_long_1b_nonull
+POSTHOOK: query: insert into groupby_long_1b_nonull values (34)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@groupby_long_1b_nonull
+POSTHOOK: Lineage: groupby_long_1b_nonull.key SCRIPT []
+PREHOOK: query: CREATE TABLE groupby_long_1c_txt(key int, b_string string)
+row format delimited fields terminated by ','
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@groupby_long_1c_txt
+POSTHOOK: query: CREATE TABLE groupby_long_1c_txt(key int, b_string string)
+row format delimited fields terminated by ','
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@groupby_long_1c_txt
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/groupby_long_1c.txt' OVERWRITE INTO TABLE groupby_long_1c_txt
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@groupby_long_1c_txt
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/groupby_long_1c.txt' OVERWRITE INTO TABLE groupby_long_1c_txt
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@groupby_long_1c_txt
+PREHOOK: query: CREATE TABLE groupby_long_1c STORED AS ORC AS SELECT * FROM groupby_long_1c_txt
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: default@groupby_long_1c_txt
+PREHOOK: Output: database:default
+PREHOOK: Output: default@groupby_long_1c
+POSTHOOK: query: CREATE TABLE groupby_long_1c STORED AS ORC AS SELECT * FROM groupby_long_1c_txt
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: default@groupby_long_1c_txt
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@groupby_long_1c
+POSTHOOK: Lineage: groupby_long_1c.b_string SIMPLE [(groupby_long_1c_txt)groupby_long_1c_txt.FieldSchema(name:b_string, type:string, comment:null), ]
+POSTHOOK: Lineage: groupby_long_1c.key SIMPLE [(groupby_long_1c_txt)groupby_long_1c_txt.FieldSchema(name:key, type:int, comment:null), ]
+PREHOOK: query: insert into groupby_long_1c values (NULL, NULL)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@groupby_long_1c
+POSTHOOK: query: insert into groupby_long_1c values (NULL, NULL)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@groupby_long_1c
+POSTHOOK: Lineage: groupby_long_1c.b_string EXPRESSION []
+POSTHOOK: Lineage: groupby_long_1c.key EXPRESSION []
+PREHOOK: query: insert into groupby_long_1c values (NULL, 'TKTKGVGFW')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@groupby_long_1c
+POSTHOOK: query: insert into groupby_long_1c values (NULL, 'TKTKGVGFW')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@groupby_long_1c
+POSTHOOK: Lineage: groupby_long_1c.b_string SCRIPT []
+POSTHOOK: Lineage: groupby_long_1c.key EXPRESSION []
+PREHOOK: query: insert into groupby_long_1c values (NULL, 'NEW')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@groupby_long_1c
+POSTHOOK: query: insert into groupby_long_1c values (NULL, 'NEW')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@groupby_long_1c
+POSTHOOK: Lineage: groupby_long_1c.b_string SCRIPT []
+POSTHOOK: Lineage: groupby_long_1c.key EXPRESSION []
+PREHOOK: query: CREATE TABLE groupby_long_1c_nonull_txt(key int, b_string string)
+row format delimited fields terminated by ','
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@groupby_long_1c_nonull_txt
+POSTHOOK: query: CREATE TABLE groupby_long_1c_nonull_txt(key int, b_string string)
+row format delimited fields terminated by ','
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@groupby_long_1c_nonull_txt
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/groupby_long_1c_nonull.txt' OVERWRITE INTO TABLE groupby_long_1c_nonull_txt
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@groupby_long_1c_nonull_txt
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/groupby_long_1c_nonull.txt' OVERWRITE INTO TABLE groupby_long_1c_nonull_txt
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@groupby_long_1c_nonull_txt
+PREHOOK: query: CREATE TABLE groupby_long_1c_nonull STORED AS ORC AS SELECT * FROM groupby_long_1c_nonull_txt
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: default@groupby_long_1c_nonull_txt
+PREHOOK: Output: database:default
+PREHOOK: Output: default@groupby_long_1c_nonull
+POSTHOOK: query: CREATE TABLE groupby_long_1c_nonull STORED AS ORC AS SELECT * FROM groupby_long_1c_nonull_txt
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: default@groupby_long_1c_nonull_txt
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@groupby_long_1c_nonull
+POSTHOOK: Lineage: groupby_long_1c_nonull.b_string SIMPLE [(groupby_long_1c_nonull_txt)groupby_long_1c_nonull_txt.FieldSchema(name:b_string, type:string, comment:null), ]
+POSTHOOK: Lineage: groupby_long_1c_nonull.key SIMPLE [(groupby_long_1c_nonull_txt)groupby_long_1c_nonull_txt.FieldSchema(name:key, type:int, comment:null), ]
+PREHOOK: query: insert into groupby_long_1c values (1928928239, NULL)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@groupby_long_1c
+POSTHOOK: query: insert into groupby_long_1c values (1928928239, NULL)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@groupby_long_1c
+POSTHOOK: Lineage: groupby_long_1c.b_string EXPRESSION []
+POSTHOOK: Lineage: groupby_long_1c.key SCRIPT []
+PREHOOK: query: insert into groupby_long_1c values (9999, 'NEW')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@groupby_long_1c
+POSTHOOK: query: insert into groupby_long_1c values (9999, 'NEW')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@groupby_long_1c
+POSTHOOK: Lineage: groupby_long_1c.b_string SCRIPT []
+POSTHOOK: Lineage: groupby_long_1c.key SCRIPT []
+PREHOOK: query: explain vectorization operator
+select key, count(key) from groupby_long_1a group by key
+PREHOOK: type: QUERY
+POSTHOOK: query: explain vectorization operator
+select key, count(key) from groupby_long_1a group by key
+POSTHOOK: type: QUERY
+PLAN VECTORIZATION:
+ enabled: true
+ enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: groupby_long_1a
+ Statistics: Num rows: 14 Data size: 112 Basic stats: COMPLETE Column stats: NONE
+ TableScan Vectorization:
+ native: true
+ Select Operator
+ expressions: key (type: bigint)
+ outputColumnNames: key
+ Select Vectorization:
+ className: VectorSelectOperator
+ native: true
+ Statistics: Num rows: 14 Data size: 112 Basic stats: COMPLETE Column stats: NONE
+ Group By Operator
+ aggregations: count(key)
+ Group By Vectorization:
+ className: VectorGroupByHashLongKeySingleCountKeyOperator
+ groupByMode: HASH
+ native: true
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ vectorProcessingMode: HASH
+ keys: key (type: bigint)
+ mode: hash
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 14 Data size: 112 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: bigint)
+ sort order: +
+ Map-reduce partition columns: _col0 (type: bigint)
+ Reduce Sink Vectorization:
+ className: VectorReduceSinkLongOperator
+ native: true
+ nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
+ Statistics: Num rows: 14 Data size: 112 Basic stats: COMPLETE Column stats: NONE
+ value expressions: _col1 (type: bigint)
+ Execution mode: vectorized, llap
+ LLAP IO: all inputs
+ Map Vectorization:
+ enabled: true
+ enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
+ inputFormatFeatureSupport: []
+ featureSupportInUse: []
+ inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ allNative: true
+ usesVectorUDFAdaptor: false
+ vectorized: true
+ Reducer 2
+ Execution mode: vectorized, llap
+ Reduce Vectorization:
+ enabled: true
+ enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true
+ allNative: false
+ usesVectorUDFAdaptor: false
+ vectorized: true
+ Reduce Operator Tree:
+ Group By Operator
+ aggregations: count(VALUE._col0)
+ Group By Vectorization:
+ className: VectorGroupByOperator
+ groupByMode: MERGEPARTIAL
+ native: false
+ vectorProcessingMode: MERGE_PARTIAL
+ keys: KEY._col0 (type: bigint)
+ mode: mergepartial
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 7 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ File Sink Vectorization:
+ className: VectorFileSinkOperator
+ native: false
+ Statistics: Num rows: 7 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: select key, count(key) from groupby_long_1a group by key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@groupby_long_1a
+#### A masked pattern was here ####
+POSTHOOK: query: select key, count(key) from groupby_long_1a group by key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@groupby_long_1a
+#### A masked pattern was here ####
+-5206670856103795573 2
+-5310365297525168078 1
+-6187919478609154811 4
+-8460550397108077433 1
+1569543799237464101 1
+3313583664488247651 1
+800 1
+968819023021777205 1
+NULL 0
+PREHOOK: query: select key, count(key) from groupby_long_1a where key != -8460550397108077433 group by key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@groupby_long_1a
+#### A masked pattern was here ####
+POSTHOOK: query: select key, count(key) from groupby_long_1a where key != -8460550397108077433 group by key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@groupby_long_1a
+#### A masked pattern was here ####
+-5206670856103795573 2
+-5310365297525168078 1
+-6187919478609154811 4
+1569543799237464101 1
+3313583664488247651 1
+800 1
+968819023021777205 1
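The plan above is the point of this golden file: the map-side Group By is planned as VectorGroupByHashLongKeySingleCountKeyOperator with native: true because every entry in nativeConditionsMet holds (single key column, a single COUNT aggregation or duplicate reduction, HASH mode, no grouping sets, a Tez/Spark engine). To compare against the generic operator, one could flip the flag this patch adds; a minimal sketch (standard SET syntax; the expected fallback class is the non-native VectorGroupByOperator seen on the reduce side):

-- turning the native path off should replan the same query with VectorGroupByOperator
set hive.vectorized.execution.groupby.native.enabled=false;
explain vectorization operator
select key, count(key) from groupby_long_1a group by key;
-- re-enable for the rest of the session
set hive.vectorized.execution.groupby.native.enabled=true;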
+PREHOOK: query: explain vectorization operator
+select key, count(*) from groupby_long_1a group by key
+PREHOOK: type: QUERY
+POSTHOOK: query: explain vectorization operator
+select key, count(*) from groupby_long_1a group by key
+POSTHOOK: type: QUERY
+PLAN VECTORIZATION:
+ enabled: true
+ enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: groupby_long_1a
+ Statistics: Num rows: 14 Data size: 112 Basic stats: COMPLETE Column stats: NONE
+ TableScan Vectorization:
+ native: true
+ Select Operator
+ expressions: key (type: bigint)
+ outputColumnNames: key
+ Select Vectorization:
+ className: VectorSelectOperator
+ native: true
+ Statistics: Num rows: 14 Data size: 112 Basic stats: COMPLETE Column stats: NONE
+ Group By Operator
+ aggregations: count()
+ Group By Vectorization:
+ className: VectorGroupByHashLongKeySingleCountStarOperator
+ groupByMode: HASH
+ native: true
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ vectorProcessingMode: HASH
+ keys: key (type: bigint)
+ mode: hash
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 14 Data size: 112 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: bigint)
+ sort order: +
+ Map-reduce partition columns: _col0 (type: bigint)
+ Reduce Sink Vectorization:
+ className: VectorReduceSinkLongOperator
+ native: true
+ nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
+ Statistics: Num rows: 14 Data size: 112 Basic stats: COMPLETE Column stats: NONE
+ value expressions: _col1 (type: bigint)
+ Execution mode: vectorized, llap
+ LLAP IO: all inputs
+ Map Vectorization:
+ enabled: true
+ enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
+ inputFormatFeatureSupport: []
+ featureSupportInUse: []
+ inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ allNative: true
+ usesVectorUDFAdaptor: false
+ vectorized: true
+ Reducer 2
+ Execution mode: vectorized, llap
+ Reduce Vectorization:
+ enabled: true
+ enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true
+ allNative: false
+ usesVectorUDFAdaptor: false
+ vectorized: true
+ Reduce Operator Tree:
+ Group By Operator
+ aggregations: count(VALUE._col0)
+ Group By Vectorization:
+ className: VectorGroupByOperator
+ groupByMode: MERGEPARTIAL
+ native: false
+ vectorProcessingMode: MERGE_PARTIAL
+ keys: KEY._col0 (type: bigint)
+ mode: mergepartial
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 7 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ File Sink Vectorization:
+ className: VectorFileSinkOperator
+ native: false
+ Statistics: Num rows: 7 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: select key, count(*) from groupby_long_1a group by key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@groupby_long_1a
+#### A masked pattern was here ####
+POSTHOOK: query: select key, count(*) from groupby_long_1a group by key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@groupby_long_1a
+#### A masked pattern was here ####
+-5206670856103795573 2
+-5310365297525168078 1
+-6187919478609154811 4
+-8460550397108077433 1
+1569543799237464101 1
+3313583664488247651 1
+800 1
+968819023021777205 1
+NULL 2
+PREHOOK: query: select key, count(*) from groupby_long_1a where key != -8460550397108077433 group by key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@groupby_long_1a
+#### A masked pattern was here ####
+POSTHOOK: query: select key, count(*) from groupby_long_1a where key != -8460550397108077433 group by key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@groupby_long_1a
+#### A masked pattern was here ####
+-5206670856103795573 2
+-5310365297525168078 1
+-6187919478609154811 4
+1569543799237464101 1
+3313583664488247651 1
+800 1
+968819023021777205 1
+PREHOOK: query: select key, count(key) from groupby_long_1a_nonull group by key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@groupby_long_1a_nonull
+#### A masked pattern was here ####
+POSTHOOK: query: select key, count(key) from groupby_long_1a_nonull group by key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@groupby_long_1a_nonull
+#### A masked pattern was here ####
+-5206670856103795573 1
+-5310365297525168078 1
+-6187919478609154811 5
+-8460550397108077433 1
+1000 1
+1569543799237464101 1
+3313583664488247651 1
+968819023021777205 1
+PREHOOK: query: select key, count(key) from groupby_long_1a_nonull where key != 1569543799237464101 group by key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@groupby_long_1a_nonull
+#### A masked pattern was here ####
+POSTHOOK: query: select key, count(key) from groupby_long_1a_nonull where key != 1569543799237464101 group by key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@groupby_long_1a_nonull
+#### A masked pattern was here ####
+-5206670856103795573 1
+-5310365297525168078 1
+-6187919478609154811 5
+-8460550397108077433 1
+1000 1
+3313583664488247651 1
+968819023021777205 1
+PREHOOK: query: select key, count(*) from groupby_long_1a_nonull group by key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@groupby_long_1a_nonull
+#### A masked pattern was here ####
+POSTHOOK: query: select key, count(*) from groupby_long_1a_nonull group by key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@groupby_long_1a_nonull
+#### A masked pattern was here ####
+-5206670856103795573 1
+-5310365297525168078 1
+-6187919478609154811 5
+-8460550397108077433 1
+1000 1
+1569543799237464101 1
+3313583664488247651 1
+968819023021777205 1
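Note how the NULL group differs in the groupby_long_1a results above: count(key) reports NULL 0 because COUNT over an expression skips NULL inputs, while count(*) reports NULL 2 because it counts rows (the \N line loaded from groupby_long_1a.txt plus the inserted NULL). Both single-COUNT shapes run through the native operator, so this is exactly the NULL edge being pinned down. A self-contained illustration with a hypothetical table t holding keys (1), (1), (NULL):

select key, count(key), count(*) from t group by key;
-- expected: 1     2    2
--           NULL  0    1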
+PREHOOK: query: select key, count(*) from groupby_long_1a_nonull where key != 1569543799237464101 group by key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@groupby_long_1a_nonull
+#### A masked pattern was here ####
+POSTHOOK: query: select key, count(*) from groupby_long_1a_nonull where key != 1569543799237464101 group by key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@groupby_long_1a_nonull
+#### A masked pattern was here ####
+-5206670856103795573 1
+-5310365297525168078 1
+-6187919478609154811 5
+-8460550397108077433 1
+1000 1
+3313583664488247651 1
+968819023021777205 1
+PREHOOK: query: explain vectorization operator
+select key, count(key) from groupby_long_1b group by key
+PREHOOK: type: QUERY
+POSTHOOK: query: explain vectorization operator
+select key, count(key) from groupby_long_1b group by key
+POSTHOOK: type: QUERY
+PLAN VECTORIZATION:
+ enabled: true
+ enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: groupby_long_1b
+ Statistics: Num rows: 16 Data size: 64 Basic stats: COMPLETE Column stats: NONE
+ TableScan Vectorization:
+ native: true
+ Select Operator
+ expressions: key (type: smallint)
+ outputColumnNames: key
+ Select Vectorization:
+ className: VectorSelectOperator
+ native: true
+ Statistics: Num rows: 16 Data size: 64 Basic stats: COMPLETE Column stats: NONE
+ Group By Operator
+ aggregations: count(key)
+ Group By Vectorization:
+ className: VectorGroupByHashLongKeySingleCountKeyOperator
+ groupByMode: HASH
+ native: true
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ vectorProcessingMode: HASH
+ keys: key (type: smallint)
+ mode: hash
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 16 Data size: 64 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: smallint)
+ sort order: +
+ Map-reduce partition columns: _col0 (type: smallint)
+ Reduce Sink Vectorization:
+ className: VectorReduceSinkLongOperator
+ native: true
+ nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
+ Statistics: Num rows: 16 Data size: 64 Basic stats: COMPLETE Column stats: NONE
+ value expressions: _col1 (type: bigint)
+ Execution mode: vectorized, llap
+ LLAP IO: all inputs
+ Map Vectorization:
+ enabled: true
+ enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
+ inputFormatFeatureSupport: []
+ featureSupportInUse: []
+ inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ allNative: true
+ usesVectorUDFAdaptor: false
+ vectorized: true
+ Reducer 2
+ Execution mode: vectorized, llap
+ Reduce Vectorization:
+ enabled: true
+ enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true
+ allNative: false
+ usesVectorUDFAdaptor: false
+ vectorized: true
+ Reduce Operator Tree:
+ Group By Operator
+ aggregations: count(VALUE._col0)
+ Group By Vectorization:
+ className: VectorGroupByOperator
+ groupByMode: MERGEPARTIAL
+ native: false
+ vectorProcessingMode: MERGE_PARTIAL
+ keys: KEY._col0 (type: smallint)
+ mode: mergepartial
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 8 Data size: 32 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ File Sink Vectorization:
+ className: VectorFileSinkOperator
+ native: false
+ Statistics: Num rows: 8 Data size: 32 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: select key, count(key) from groupby_long_1b group by key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@groupby_long_1b
+#### A masked pattern was here ####
+POSTHOOK: query: select key, count(key) from groupby_long_1b group by key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@groupby_long_1b
+#### A masked pattern was here ####
+-25394 1
+31713 10
+32030 2
+800 1
+NULL 0
+PREHOOK: query: select key, count(key) from groupby_long_1b where key != 32030 group by key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@groupby_long_1b
+#### A masked pattern was here ####
+POSTHOOK: query: select key, count(key) from groupby_long_1b where key != 32030 group by key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@groupby_long_1b
+#### A masked pattern was here ####
+-25394 1
+31713 10
+800 1
+PREHOOK: query: explain vectorization operator
+select key, count(*) from groupby_long_1b group by key
+PREHOOK: type: QUERY
+POSTHOOK: query: explain vectorization operator
+select key, count(*) from groupby_long_1b group by key
+POSTHOOK: type: QUERY
+PLAN VECTORIZATION:
+ enabled: true
+ enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: groupby_long_1b
+ Statistics: Num rows: 16 Data size: 64 Basic stats: COMPLETE Column stats: NONE
+ TableScan Vectorization:
+ native: true
+ Select Operator
+ expressions: key (type: smallint)
+ outputColumnNames: key
+ Select Vectorization:
+ className: VectorSelectOperator
+ native: true
+ Statistics: Num rows: 16 Data size: 64 Basic stats: COMPLETE Column stats: NONE
+ Group By Operator
+ aggregations: count()
+ Group By Vectorization:
+ className: VectorGroupByHashLongKeySingleCountStarOperator
+ groupByMode: HASH
+ native: true
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ vectorProcessingMode: HASH
+ keys: key (type: smallint)
+ mode: hash
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 16 Data size: 64 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: smallint)
+ sort order: +
+ Map-reduce partition columns: _col0 (type: smallint)
+ Reduce Sink Vectorization:
+ className: VectorReduceSinkLongOperator
+ native: true
+ nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
+ Statistics: Num rows: 16 Data size: 64 Basic stats: COMPLETE Column stats: NONE
+ value expressions: _col1 (type: bigint)
+ Execution mode: vectorized, llap
+ LLAP IO: all inputs
+ Map Vectorization:
+ enabled: true
+ enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
+ inputFormatFeatureSupport: []
+ featureSupportInUse: []
+ inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ allNative: true
+ usesVectorUDFAdaptor: false
+ vectorized: true
+ Reducer 2
+ Execution mode: vectorized, llap
+ Reduce Vectorization:
+ enabled: true
+ enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true
+ allNative: false
+ usesVectorUDFAdaptor: false
+ vectorized: true
+ Reduce Operator Tree:
+ Group By Operator
+ aggregations: count(VALUE._col0)
+ Group By Vectorization:
+ className: VectorGroupByOperator
+ groupByMode: MERGEPARTIAL
+ native: false
+ vectorProcessingMode: MERGE_PARTIAL
+ keys: KEY._col0 (type: smallint)
+ mode: mergepartial
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 8 Data size: 32 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ File Sink Vectorization:
+ className: VectorFileSinkOperator
+ native: false
+ Statistics: Num rows: 8 Data size: 32 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: select key, count(*) from groupby_long_1b group by key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@groupby_long_1b
+#### A masked pattern was here ####
+POSTHOOK: query: select key, count(*) from groupby_long_1b group by key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@groupby_long_1b
+#### A masked pattern was here ####
+-25394 1
+31713 10
+32030 2
+800 1
+NULL 2
+PREHOOK: query: select key, count(*) from groupby_long_1b where key != 32030 group by key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@groupby_long_1b
+#### A masked pattern was here ####
+POSTHOOK: query: select key, count(*) from groupby_long_1b where key != 32030 group by key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@groupby_long_1b
+#### A masked pattern was here ####
+-25394 1
+31713 10
+800 1
+PREHOOK: query: select key, count(key) from groupby_long_1b_nonull group by key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@groupby_long_1b_nonull
+#### A masked pattern was here ####
+POSTHOOK: query: select key, count(key) from groupby_long_1b_nonull group by key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@groupby_long_1b_nonull
+#### A masked pattern was here ####
+-25394 1
+31713 11
+32030 1
+34 1
+PREHOOK: query: select key, count(key) from groupby_long_1b_nonull where key != 32030 group by key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@groupby_long_1b_nonull
+#### A masked pattern was here ####
+POSTHOOK: query: select key, count(key) from groupby_long_1b_nonull where key != 32030 group by key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@groupby_long_1b_nonull
+#### A masked pattern was here ####
+-25394 1
+31713 11
+34 1
+PREHOOK: query: select key, count(*) from groupby_long_1b_nonull group by key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@groupby_long_1b_nonull
+#### A masked pattern was here ####
+POSTHOOK: query: select key, count(*) from groupby_long_1b_nonull group by key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@groupby_long_1b_nonull
+#### A masked pattern was here ####
+-25394 1
+31713 11
+32030 1
+34 1
+PREHOOK: query: select key, count(*) from groupby_long_1b_nonull where key != 32030 group by key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@groupby_long_1b_nonull
+#### A masked pattern was here ####
+POSTHOOK: query: select key, count(*) from groupby_long_1b_nonull where key != 32030 group by key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@groupby_long_1b_nonull
+#### A masked pattern was here ####
+-25394 1
+31713 11
+34 1
+PREHOOK: query: explain vectorization operator
+select key, count(key) from groupby_long_1c group by key
+PREHOOK: type: QUERY
+POSTHOOK: query: explain vectorization operator
+select key, count(key) from groupby_long_1c group by key
+POSTHOOK: type: QUERY
+PLAN VECTORIZATION:
+ enabled: true
+ enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: groupby_long_1c
+ Statistics: Num rows: 16 Data size: 64 Basic stats: COMPLETE Column stats: NONE
+ TableScan Vectorization:
+ native: true
+ Select Operator
+ expressions: key (type: int)
+ outputColumnNames: key
+ Select Vectorization:
+ className: VectorSelectOperator
+ native: true
+ Statistics: Num rows: 16 Data size: 64 Basic stats: COMPLETE Column stats: NONE
+ Group By Operator
+ aggregations: count(key)
+ Group By Vectorization:
+ className: VectorGroupByHashLongKeySingleCountKeyOperator
+ groupByMode: HASH
+ native: true
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ vectorProcessingMode: HASH
+ keys: key (type: int)
+ mode: hash
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 16 Data size: 64 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: int)
+ sort order: +
+ Map-reduce partition columns: _col0 (type: int)
+ Reduce Sink Vectorization:
+ className: VectorReduceSinkLongOperator
+ native: true
+ nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
+ Statistics: Num rows: 16 Data size: 64 Basic stats: COMPLETE Column stats: NONE
+ value expressions: _col1 (type: bigint)
+ Execution mode: vectorized, llap
+ LLAP IO: all inputs
+ Map Vectorization:
+ enabled: true
+ enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
+ inputFormatFeatureSupport: []
+ featureSupportInUse: []
+ inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ allNative: true
+ usesVectorUDFAdaptor: false
+ vectorized: true
+ Reducer 2
+ Execution mode: vectorized, llap
+ Reduce Vectorization:
+ enabled: true
+ enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true
+ allNative: false
+ usesVectorUDFAdaptor: false
+ vectorized: true
+ Reduce Operator Tree:
+ Group By Operator
+ aggregations: count(VALUE._col0)
+ Group By Vectorization:
+ className: VectorGroupByOperator
+ groupByMode: MERGEPARTIAL
+ native: false
+ vectorProcessingMode: MERGE_PARTIAL
+ keys: KEY._col0 (type: int)
+ mode: mergepartial
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 8 Data size: 32 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ File Sink Vectorization:
+ className: VectorFileSinkOperator
+ native: false
+ Statistics: Num rows: 8 Data size: 32 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: select key, count(key) from groupby_long_1c group by key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@groupby_long_1c
+#### A masked pattern was here ####
+POSTHOOK: query: select key, count(key) from groupby_long_1c group by key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@groupby_long_1c
+#### A masked pattern was here ####
+-1437463633 5
+1725068083 1
+1928928239 5
+9999 1
+NULL 0
+PREHOOK: query: select key, count(key) from groupby_long_1c where key != -1437463633 group by key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@groupby_long_1c
+#### A masked pattern was here ####
+POSTHOOK: query: select key, count(key) from groupby_long_1c where key != -1437463633 group by key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@groupby_long_1c
+#### A masked pattern was here ####
+1725068083 1
+1928928239 5
+9999 1
+PREHOOK: query: explain vectorization operator
+select key, count(*) from groupby_long_1c group by key
+PREHOOK: type: QUERY
+POSTHOOK: query: explain vectorization operator
+select key, count(*) from groupby_long_1c group by key
+POSTHOOK: type: QUERY
+PLAN VECTORIZATION:
+ enabled: true
+ enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: groupby_long_1c
+ Statistics: Num rows: 16 Data size: 64 Basic stats: COMPLETE Column stats: NONE
+ TableScan Vectorization:
+ native: true
+ Select Operator
+ expressions: key (type: int)
+ outputColumnNames: key
+ Select Vectorization:
+ className: VectorSelectOperator
+ native: true
+ Statistics: Num rows: 16 Data size: 64 Basic stats: COMPLETE Column stats: NONE
+ Group By Operator
+ aggregations: count()
+ Group By Vectorization:
+ className: VectorGroupByHashLongKeySingleCountStarOperator
+ groupByMode: HASH
+ native: true
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ vectorProcessingMode: HASH
+ keys: key (type: int)
+ mode: hash
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 16 Data size: 64 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: int)
+ sort order: +
+ Map-reduce partition columns: _col0 (type: int)
+ Reduce Sink Vectorization:
+ className: VectorReduceSinkLongOperator
+ native: true
+ nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
+ Statistics: Num rows: 16 Data size: 64 Basic stats: COMPLETE Column stats: NONE
+ value expressions: _col1 (type: bigint)
+ Execution mode: vectorized, llap
+ LLAP IO: all inputs
+ Map Vectorization:
+ enabled: true
+ enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
+ inputFormatFeatureSupport: []
+ featureSupportInUse: []
+ inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ allNative: true
+ usesVectorUDFAdaptor: false
+ vectorized: true
+ Reducer 2
+ Execution mode: vectorized, llap
+ Reduce Vectorization:
+ enabled: true
+ enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true
+ allNative: false
+ usesVectorUDFAdaptor: false
+ vectorized: true
+ Reduce Operator Tree:
+ Group By Operator
+ aggregations: count(VALUE._col0)
+ Group By Vectorization:
+ className: VectorGroupByOperator
+ groupByMode: MERGEPARTIAL
+ native: false
+ vectorProcessingMode: MERGE_PARTIAL
+ keys: KEY._col0 (type: int)
+ mode: mergepartial
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 8 Data size: 32 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ File Sink Vectorization:
+ className: VectorFileSinkOperator
+ native: false
+ Statistics: Num rows: 8 Data size: 32 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: select key, count(*) from groupby_long_1c group by key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@groupby_long_1c
+#### A masked pattern was here ####
+POSTHOOK: query: select key, count(*) from groupby_long_1c group by key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@groupby_long_1c
+#### A masked pattern was here ####
+-1437463633 5
+1725068083 1
+1928928239 5
+9999 1
+NULL 4
+PREHOOK: query: select key, count(*) from groupby_long_1c where key != -1437463633 group by key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@groupby_long_1c
+#### A masked pattern was here ####
+POSTHOOK: query: select key, count(*) from groupby_long_1c where key != -1437463633 group by key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@groupby_long_1c
+#### A masked pattern was here ####
+1725068083 1
+1928928239 5
+9999 1
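The NULL 4 row above decomposes as one \N key loaded from groupby_long_1c.txt plus the three NULL-key rows added by the earlier INSERT ... VALUES statements; count(key) over the same table reported NULL 0. A direct way to cross-check that breakdown by hand (plain HiveQL, not part of the test):

-- should match the NULL group of the count(*) result above
select count(*) from groupby_long_1c where key is null;
-- expected: 4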
+PREHOOK: query: explain vectorization operator
+select key, count(b_string) from groupby_long_1c group by key
+PREHOOK: type: QUERY
+POSTHOOK: query: explain vectorization operator
+select key, count(b_string) from groupby_long_1c group by key
+POSTHOOK: type: QUERY
+PLAN VECTORIZATION:
+ enabled: true
+ enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: groupby_long_1c
+ Statistics: Num rows: 16 Data size: 3008 Basic stats: COMPLETE Column stats: NONE
+ TableScan Vectorization:
+ native: true
+ Select Operator
+ expressions: key (type: int), b_string (type: string)
+ outputColumnNames: key, b_string
+ Select Vectorization:
+ className: VectorSelectOperator
+ native: true
+ Statistics: Num rows: 16 Data size: 3008 Basic stats: COMPLETE Column stats: NONE
+ Group By Operator
+ aggregations: count(b_string)
+ Group By Vectorization:
+ className: VectorGroupByHashLongKeySingleCountColumnOperator
+ groupByMode: HASH
+ native: true
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ vectorProcessingMode: HASH
+ keys: key (type: int)
+ mode: hash
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 16 Data size: 3008 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: int)
+ sort order: +
+ Map-reduce partition columns: _col0 (type: int)
+ Reduce Sink Vectorization:
+ className: VectorReduceSinkLongOperator
+ native: true
+ nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
+ Statistics: Num rows: 16 Data size: 3008 Basic stats: COMPLETE Column stats: NONE
+ value expressions: _col1 (type: bigint)
+ Execution mode: vectorized, llap
+ LLAP IO: all inputs
+ Map Vectorization:
+ enabled: true
+ enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
+ inputFormatFeatureSupport: []
+ featureSupportInUse: []
+ inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+ allNative: true
+ usesVectorUDFAdaptor: false
+ vectorized: true
+ Reducer 2
+ Execution mode: vectorized, llap
+ Reduce Vectorization:
+ enabled: true
+ enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true
+ allNative: false
+ usesVectorUDFAdaptor: false
+ vectorized: true
+ Reduce Operator Tree:
+ Group By Operator
+ aggregations: count(VALUE._col0)
+ Group By Vectorization:
+ className: VectorGroupByOperator
+ groupByMode: MERGEPARTIAL
+ native: false
+ vectorProcessingMode: MERGE_PARTIAL
+ keys: KEY._col0 (type: int)
+ mode: mergepartial
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 8 Data size: 1504 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ File Sink Vectorization:
+ className: VectorFileSinkOperator
+ native: false
+ Statistics: Num rows: 8 Data size: 1504 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: select key, count(b_string) from groupby_long_1c group by key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@groupby_long_1c
+#### A masked pattern was here ####
+POSTHOOK: query: select key, count(b_string) from groupby_long_1c group by key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@groupby_long_1c
+#### A masked pattern was here ####
+-1437463633 4
+1725068083 1
+1928928239 2
+9999 1
+NULL 3
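This third plan completes the set of single-COUNT specializations introduced here: count(key) maps to VectorGroupByHashLongKeySingleCountKeyOperator, count(*) to VectorGroupByHashLongKeySingleCountStarOperator, and count(column) over a non-key column to VectorGroupByHashLongKeySingleCountColumnOperator. The count(b_string) results also show the NULL-skipping rule applied to the aggregated column: key 1928928239 groups five rows but only two non-NULL strings. The three shapes side by side (class names taken from the plans above):

-- same table, three operator specializations
select key, count(key)      from groupby_long_1c group by key;  -- ...SingleCountKeyOperator
select key, count(*)        from groupby_long_1c group by key;  -- ...SingleCountStarOperator
select key, count(b_string) from groupby_long_1c group by key;  -- ...SingleCountColumnOperator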
+PREHOOK: query: select key, count(b_string) from groupby_long_1c where key != -1437463633 group by key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@groupby_long_1c
+#### A masked pattern was here ####
+POSTHOOK: query: select key, count(b_string) from groupby_long_1c where key != -1437463633 group by key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@groupby_long_1c
+#### A masked pattern was here ####
+1725068083 1
+1928928239 2
+9999 1
+PREHOOK: query: select key, count(key) from groupby_long_1c_nonull group by key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@groupby_long_1c_nonull
+#### A masked pattern was here ####
+POSTHOOK: query: select key, count(key) from groupby_long_1c_nonull group by key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@groupby_long_1c_nonull
+#### A masked pattern was here ####
+-1437463633 5
+1725068083 1
+1928928239 4
+PREHOOK: query: select key, count(key) from groupby_long_1c_nonull where key != -1437463633 group by key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@groupby_long_1c_nonull
+#### A masked pattern was here ####
+POSTHOOK: query: select key, count(key) from groupby_long_1c_nonull where key != -1437463633 group by key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@groupby_long_1c_nonull
+#### A masked pattern was here ####
+1725068083 1
+1928928239 4
+PREHOOK: query: select key, count(*) from groupby_long_1c_nonull group by key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@groupby_long_1c_nonull
+#### A masked pattern was here ####
+POSTHOOK: query: select key, count(*) from groupby_long_1c_nonull group by key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@groupby_long_1c_nonull
+#### A masked pattern was here ####
+-1437463633 5
+1725068083 1
+1928928239 4
+PREHOOK: query: select key, count(*) from groupby_long_1c_nonull where key != -1437463633 group by key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@groupby_long_1c_nonull
+#### A masked pattern was here ####
+POSTHOOK: query: select key, count(*) from groupby_long_1c_nonull where key != -1437463633 group by key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@groupby_long_1c_nonull
+#### A masked pattern was here ####
+1725068083 1
+1928928239 4
+PREHOOK: query: select key, count(b_string) from groupby_long_1c_nonull group by key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@groupby_long_1c_nonull
+#### A masked pattern was here ####
+POSTHOOK: query: select key, count(b_string) from groupby_long_1c_nonull group by key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@groupby_long_1c_nonull
+#### A masked pattern was here ####
+-1437463633 4
+1725068083 1
+1928928239 2
+PREHOOK: query: select key, count(b_string) from groupby_long_1c_nonull where key != -1437463633 group by key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@groupby_long_1c_nonull
+#### A masked pattern was here ####
+POSTHOOK: query: select key, count(b_string) from groupby_long_1c_nonull where key != -1437463633 group by key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@groupby_long_1c_nonull
+#### A masked pattern was here ####
+1725068083 1
+1928928239 2
+PREHOOK: query: CREATE TABLE groupby_string_1a_txt(key string)
+row format delimited fields terminated by ','
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@groupby_string_1a_txt
+POSTHOOK: query: CREATE TABLE groupby_string_1a_txt(key string)
+row format delimited fields terminated by ','
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@groupby_string_1a_txt
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/groupby_string_1a.txt' OVERWRITE INTO TABLE groupby_string_1a_txt
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@groupby_string_1a_txt
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/groupby_string_1a.txt' OVERWRITE INTO TABLE groupby_string_1a_txt
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@groupby_string_1a_txt
+PREHOOK: query: CREATE TABLE groupby_string_1a STORED AS ORC AS SELECT * FROM groupby_string_1a_txt
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: default@groupby_string_1a_txt
+PREHOOK: Output: database:default
+PREHOOK: Output: default@groupby_string_1a
+POSTHOOK: query: CREATE TABLE groupby_string_1a STORED AS ORC AS SELECT * FROM groupby_string_1a_txt
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: default@groupby_string_1a_txt
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@groupby_string_1a
+POSTHOOK: Lineage: groupby_string_1a.key SIMPLE [(groupby_string_1a_txt)groupby_string_1a_txt.FieldSchema(name:key, type:string, comment:null), ]
+PREHOOK: query: insert into groupby_string_1a values (NULL)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@groupby_string_1a
+POSTHOOK: query: insert into groupby_string_1a values (NULL)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@groupby_string_1a
+POSTHOOK: Lineage: groupby_string_1a.key EXPRESSION []
+PREHOOK: query: insert into groupby_string_1a values ('QNCYBDW')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@groupby_string_1a
+POSTHOOK: query: insert into groupby_string_1a values ('QNCYBDW')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@groupby_string_1a
+POSTHOOK: Lineage: groupby_string_1a.key SCRIPT []
+PREHOOK: query: insert into groupby_string_1a values ('NOT')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@groupby_string_1a
+POSTHOOK: query: insert into groupby_string_1a values ('NOT')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@groupby_string_1a
+POSTHOOK: Lineage: groupby_string_1a.key SCRIPT []
+PREHOOK: query: CREATE TABLE groupby_string_1a_nonull_txt(key string)
+row format delimited fields terminated by ','
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@groupby_string_1a_nonull_txt
+POSTHOOK: query: CREATE TABLE groupby_string_1a_nonull_txt(key string)
+row format delimited fields terminated by ','
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@groupby_string_1a_nonull_txt
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/groupby_string_1a_nonull.txt' OVERWRITE INTO TABLE groupby_string_1a_nonull_txt
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@groupby_string_1a_nonull_txt
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/groupby_string_1a_nonull.txt' OVERWRITE INTO TABLE groupby_string_1a_nonull_txt
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@groupby_string_1a_nonull_txt
+PREHOOK: query: CREATE TABLE groupby_string_1a_nonull STORED AS ORC AS SELECT * FROM groupby_string_1a_nonull_txt
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: default@groupby_string_1a_nonull_txt
+PREHOOK: Output: database:default
+PREHOOK: Output: default@groupby_string_1a_nonull
+POSTHOOK: query: CREATE TABLE groupby_string_1a_nonull STORED AS ORC AS SELECT * FROM groupby_string_1a_nonull_txt
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: default@groupby_string_1a_nonull_txt
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@groupby_string_1a_nonull
+POSTHOOK: Lineage: groupby_string_1a_nonull.key SIMPLE [(groupby_string_1a_nonull_txt)groupby_string_1a_nonull_txt.FieldSchema(name:key, type:string, comment:null), ]
+PREHOOK: query: insert into groupby_string_1a_nonull values ('PXLD')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@groupby_string_1a_nonull
+POSTHOOK: query: insert into groupby_string_1a_nonull values ('PXLD')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@groupby_string_1a_nonull
+POSTHOOK: Lineage: groupby_string_1a_nonull.key SCRIPT []
+PREHOOK: query: insert into groupby_string_1a_nonull values ('AA')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@groupby_string_1a_nonull
+POSTHOOK: query: insert into groupby_string_1a_nonull values ('AA')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@groupby_string_1a_nonull
+POSTHOOK: Lineage: groupby_string_1a_nonull.key SCRIPT []
+PREHOOK: query: CREATE TABLE groupby_string_1b_txt(key char(4))
+row format delimited fields terminated by ','
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@groupby_string_1b_txt
+POSTHOOK: query: CREATE TABLE groupby_string_1b_txt(key char(4))
+row format delimited fields terminated by ','
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@groupby_string_1b_txt
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/groupby_string_1a.txt' OVERWRITE INTO TABLE groupby_string_1b_txt
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@groupby_string_1b_txt
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/groupby_string_1a.txt' OVERWRITE INTO TABLE groupby_string_1b_txt
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@groupby_string_1b_txt
+PREHOOK: query: CREATE TABLE groupby_string_1b STORED AS ORC AS SELECT * FROM groupby_string_1b_txt
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: default@groupby_string_1b_txt
+PREHOOK: Output: database:default
+PREHOOK: Output: default@groupby_string_1b
+POSTHOOK: query: CREATE TABLE groupby_string_1b STORED AS ORC AS SELECT * FROM groupby_string_1b_txt
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: default@groupby_string_1b_txt
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@groupby_string_1b
+POSTHOOK: Lineage: groupby_string_1b.key SIMPLE [(groupby_string_1b_txt)groupby_string_1b_txt.FieldSchema(name:key, type:char(4), comment:null), ]
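groupby_string_1b reuses groupby_string_1a.txt but declares its key as char(4), so values longer than four characters are truncated to the declared length when the text is read. A quick illustration of that char(n) behavior (the literal is one of the values inserted above; the query itself is not part of the test):

select cast('QNCYBDW' as char(4));
-- expected: QNCY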
+PREHOOK: query: insert into groupby_string_1a values (NULL)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@groupby_string_1a
+POSTHOOK: query: insert into groupby_string_1a values (NULL)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@groupby_string_1a
+POSTHOOK: Lineage: groupby_string_1a.key EXPRESSION []
+PREHOOK: query: insert into groupby_string_1a values ('QNCYBDW')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@groupby_string_1a
+POSTHOOK: query: insert into groupby_string_1a values ('QNCYBDW')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@groupby_string_1a
+POSTHOOK: Lineage: groupby_string_1a.key SCRIPT []
+PREHOOK: query: insert into groupby_string_1a values ('NOT')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@groupby_string_1a
+POSTHOOK: query: insert into groupby_string_1a values ('NOT')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@groupby_string_1a
+POSTHOOK: Lineage: groupby_string_1a.key SCRIPT []
+PREHOOK: query: CREATE TABLE groupby_string_1b_nonull_txt(key char(4))
+row format delimited fields terminated by ','
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@groupby_string_1b_nonull_txt
+POSTHOOK: query: CREATE TABLE groupby_string_1b_nonull_txt(key char(4))
+row format delimited fields terminated by ','
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@groupby_string_1b_nonull_txt
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/groupby_string_1a_nonull.txt' OVERWRITE INTO TABLE groupby_string_1b_nonull_txt
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@groupby_string_1b_nonull_txt
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/groupby_string_1a_nonull.txt' OVERWRITE INTO TABLE groupby_string_1b_nonull_txt
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@groupby_string_1b_nonull_txt
+PREHOOK: query: CREATE TABLE groupby_string_1b_nonull STORED AS ORC AS SELECT * FROM groupby_string_1b_nonull_txt
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: default@groupby_string_1b_nonull_txt
+PREHOOK: Output: database:default
+PREHOOK: Output: default@groupby_string_1b_nonull
+POSTHOOK: query: CREATE TABLE groupby_string_1b_nonull STORED AS ORC AS SELECT * FROM groupby_string_1b_nonull_txt
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: default@groupby_string_1b_nonull_txt
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@groupby_string_1b_nonull
+POSTHOOK: Lineage: groupby_string_1b_nonull.key SIMPLE [(groupby_string_1b_nonull_txt)groupby_string_1b_nonull_txt.FieldSchema(name:key, type:char(4), comment:null), ]
+PREHOOK: query: insert into groupby_string_1b_nonull values ('PXLD')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@groupby_string_1b_nonull
+POSTHOOK: query: insert into groupby_string_1b_nonull values ('PXLD')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@groupby_string_1b_nonull
+POSTHOOK: Lineage: groupby_string_1b_nonull.key SCRIPT []
+PREHOOK: query: insert into groupby_string_1b_nonull values ('AA')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@groupby_string_1b_nonull
+POSTHOOK: query: insert into groupby_string_1b_nonull values ('AA')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@groupby_string_1b_nonull
+POSTHOOK: Lineage: groupby_string_1b_nonull.key SCRIPT []
+PREHOOK: query: CREATE TABLE groupby_string_1c_txt(key string, s_date date, s_timestamp timestamp)
+row format delimited fields terminated by ','
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@groupby_string_1c_txt
+POSTHOOK: query: CREATE TABLE groupby_string_1c_txt(key string, s_date date, s_timestamp timestamp)
+row format delimited fields terminated by ','
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@groupby_string_1c_txt
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/groupby_string_1c.txt' OVERWRITE INTO TABLE groupby_string_1c_txt
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@groupby_string_1c_txt
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/groupby_string_1c.txt' OVERWRITE INTO TABLE groupby_string_1c_txt
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@groupby_string_1c_txt
+PREHOOK: query: CREATE TABLE groupby_string_1c STORED AS ORC AS SELECT * FROM groupby_string_1c_txt
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: default@groupby_string_1c_txt
+PREHOOK: Output: database:default
+PREHOOK: Output: default@groupby_string_1c
+POSTHOOK: query: CREATE TABLE groupby_string_1c STORED AS ORC AS SELECT * FROM groupby_string_1c_txt
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: default@groupby_string_1c_txt
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@groupby_string_1c
+POSTHOOK: Lineage: groupby_string_1c.key SIMPLE [(groupby_string_1c_txt)groupby_string_1c_txt.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: groupby_string_1c.s_date SIMPLE [(groupby_string_1c_txt)groupby_string_1c_txt.FieldSchema(name:s_date, type:date, comment:null), ]
+POSTHOOK: Lineage: groupby_string_1c.s_timestamp SIMPLE [(groupby_string_1c_txt)groupby_string_1c_txt.FieldSchema(name:s_timestamp, type:timestamp, comment:null), ]
+PREHOOK: query: insert into groupby_string_1c values (NULL, NULL, NULL)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@groupby_string_1c
+POSTHOOK: query: insert into groupby_string_1c values (NULL, NULL, NULL)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@groupby_string_1c
+POSTHOOK: Lineage: groupby_string_1c.key EXPRESSION []
+POSTHOOK: Lineage: groupby_string_1c.s_date EXPRESSION []
+POSTHOOK: Lineage: groupby_string_1c.s_timestamp EXPRESSION []
+PREHOOK: query: insert into groupby_string_1c values (NULL, '2141-02-19', '2092-06-07 06:42:30.000538454')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@groupby_string_1c
+POSTHOOK: query: insert into groupby_string_1c values (NULL, '2141-02-19', '2092-06-07 06:42:30.000538454')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@groupby_string_1c
+POSTHOOK: Lineage: groupby_string_1c.key EXPRESSION []
+POSTHOOK: Lineage: groupby_string_1c.s_date SCRIPT []
+POSTHOOK: Lineage: groupby_string_1c.s_timestamp SCRIPT []
+PREHOOK: query: insert into groupby_string_1c values (NULL, '2018-04-11', NULL)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@groupby_string_1c
+POSTHOOK: query: insert into groupby_string_1c values (NULL, '2018-04-11', NULL)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@groupby_string_1c
+POSTHOOK: Lineage: groupby_string_1c.key EXPRESSION []
+POSTHOOK: Lineage: groupby_string_1c.s_date SCRIPT []
+POSTHOOK: Lineage: groupby_string_1c.s_timestamp EXPRESSION []
+PREHOOK: query: insert into groupby_string_1c values ('ATZJTPECF', NULL, NULL)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@groupby_string_1c
+POSTHOOK: query: insert into groupby_string_1c values ('ATZJTPECF', NULL, NULL)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@groupby_string_1c
+POSTHOOK: Lineage: groupby_string_1c.key SCRIPT []
+POSTHOOK: Lineage: groupby_string_1c.s_date EXPRESSION []
+POSTHOOK: Lineage: groupby_string_1c.s_timestamp EXPRESSION []
+PREHOOK: query: insert into groupby_string_1c values ('ATZJTPECF', '2144-01-13', '2092-06-07 06:42:30.000538454')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@groupby_string_1c
+POSTHOOK: query: insert into groupby_string_1c values ('ATZJTPECF', '2144-01-13', '2092-06-07 06:42:30.000538454')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@groupby_string_1c
+POSTHOOK: Lineage: groupby_string_1c.key SCRIPT []
+POSTHOOK: Lineage: groupby_string_1c.s_date SCRIPT []
+POSTHOOK: Lineage: groupby_string_1c.s_timestamp SCRIPT []
+PREHOOK: query: insert into groupby_string_1c values ('ATZJTPECF', '1988-04-23', NULL)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@groupby_string_1c
+POSTHOOK: query: insert into groupby_string_1c values ('ATZJTPECF', '1988-04-23', NULL)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@groupby_string_1c
+POSTHOOK: Lineage: groupby_string_1c.key SCRIPT []
+POSTHOOK: Lineage: groupby_string_1c.s_date SCRIPT []
+POSTHOOK: Lineage: groupby_string_1c.s_timestamp EXPRESSION []
+PREHOOK: query: insert into groupby_string_1c values ('BB', NULL, NULL)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@groupby_string_1c
+POSTHOOK: query: insert into groupby_string_1c values ('BB', NULL, NULL)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@groupby_string_1c
+POSTHOOK: Lineage: groupby_string_1c.key SCRIPT []
+POSTHOOK: Lineage: groupby_string_1c.s_date EXPRESSION []
+POSTHOOK: Lineage: groupby_string_1c.s_timestamp EXPRESSION []
+PREHOOK: query: insert into groupby_string_1c values ('CC', '2018-04-12', '2092-06-07 06:42:30.000538454')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@groupby_string_1c
+POSTHOOK: query: insert into groupby_string_1c values ('CC', '2018-04-12', '2092-06-07 06:42:30.000538454')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@groupby_string_1c
+POSTHOOK: Lineage: groupby_string_1c.key SCRIPT []
+POSTHOOK: Lineage: groupby_string_1c.s_date SCRIPT []
+POSTHOOK: Lineage: groupby_string_1c.s_timestamp SCRIPT []
+PREHOOK: query: insert into groupby_string_1c values ('DD', '2018-04-14', NULL)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@groupby_string_1c
+POSTHOOK: query: insert into groupby_string_1c values ('DD', '2018-04-14', NULL)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@groupby_string_1c
+POSTHOOK: Lineage: groupby_string_1c.key SCRIPT []
+POSTHOOK: Lineage: groupby_string_1c.s_date SCRIPT []
+POSTHOOK: Lineage: groupby_string_1c.s_timestamp EXPRESSION []
+PREHOOK: query: CREATE TABLE groupby_string_1c_nonull_txt(key string, s_date date, s_timestamp timestamp)
+row format delimited fields terminated by ','
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@groupby_string_1c_nonull_txt
+POSTHOOK: query: CREATE TABLE groupby_string_1c_nonull_txt(key string, s_date date, s_timestamp timestamp)
+row format delimited fields terminated by ','
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@groupby_string_1c_nonull_txt
+PREHOOK: query: LOAD
DATA LOCAL INPATH '../../data/files/groupby_string_1c_nonull.txt' OVERWRITE INTO TABLE groupby_string_1c_nonull_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@groupby_string_1c_nonull_txt +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/groupby_string_1c_nonull.txt' OVERWRITE INTO TABLE groupby_string_1c_nonull_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@groupby_string_1c_nonull_txt +PREHOOK: query: CREATE TABLE groupby_string_1c_nonull STORED AS ORC AS SELECT * FROM groupby_string_1c_nonull_txt +PREHOOK: type: CREATETABLE_AS_SELECT +PREHOOK: Input: default@groupby_string_1c_nonull_txt +PREHOOK: Output: database:default +PREHOOK: Output: default@groupby_string_1c_nonull +POSTHOOK: query: CREATE TABLE groupby_string_1c_nonull STORED AS ORC AS SELECT * FROM groupby_string_1c_nonull_txt +POSTHOOK: type: CREATETABLE_AS_SELECT +POSTHOOK: Input: default@groupby_string_1c_nonull_txt +POSTHOOK: Output: database:default +POSTHOOK: Output: default@groupby_string_1c_nonull +POSTHOOK: Lineage: groupby_string_1c_nonull.key SIMPLE [(groupby_string_1c_nonull_txt)groupby_string_1c_nonull_txt.FieldSchema(name:key, type:string, comment:null), ] +POSTHOOK: Lineage: groupby_string_1c_nonull.s_date SIMPLE [(groupby_string_1c_nonull_txt)groupby_string_1c_nonull_txt.FieldSchema(name:s_date, type:date, comment:null), ] +POSTHOOK: Lineage: groupby_string_1c_nonull.s_timestamp SIMPLE [(groupby_string_1c_nonull_txt)groupby_string_1c_nonull_txt.FieldSchema(name:s_timestamp, type:timestamp, comment:null), ] +PREHOOK: query: insert into groupby_string_1c_nonull values ('SDA', NULL, NULL) +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@groupby_string_1c_nonull +POSTHOOK: query: insert into groupby_string_1c_nonull values ('SDA', NULL, NULL) +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@groupby_string_1c_nonull +POSTHOOK: Lineage: groupby_string_1c_nonull.key SCRIPT [] +POSTHOOK: Lineage: groupby_string_1c_nonull.s_date EXPRESSION [] +POSTHOOK: Lineage: groupby_string_1c_nonull.s_timestamp EXPRESSION [] +PREHOOK: query: insert into groupby_string_1c_nonull values ('SDA', '2144-01-13', '2092-06-07 06:42:30.000538454') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@groupby_string_1c_nonull +POSTHOOK: query: insert into groupby_string_1c_nonull values ('SDA', '2144-01-13', '2092-06-07 06:42:30.000538454') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@groupby_string_1c_nonull +POSTHOOK: Lineage: groupby_string_1c_nonull.key SCRIPT [] +POSTHOOK: Lineage: groupby_string_1c_nonull.s_date SCRIPT [] +POSTHOOK: Lineage: groupby_string_1c_nonull.s_timestamp SCRIPT [] +PREHOOK: query: insert into groupby_string_1c_nonull values ('SDA', '1988-04-23', NULL) +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@groupby_string_1c_nonull +POSTHOOK: query: insert into groupby_string_1c_nonull values ('SDA', '1988-04-23', NULL) +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@groupby_string_1c_nonull +POSTHOOK: Lineage: groupby_string_1c_nonull.key SCRIPT [] +POSTHOOK: Lineage: groupby_string_1c_nonull.s_date SCRIPT [] +POSTHOOK: Lineage: groupby_string_1c_nonull.s_timestamp EXPRESSION [] +PREHOOK: query: insert into groupby_string_1c_nonull values ('EEE', NULL, 
NULL) +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@groupby_string_1c_nonull +POSTHOOK: query: insert into groupby_string_1c_nonull values ('EEE', NULL, NULL) +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@groupby_string_1c_nonull +POSTHOOK: Lineage: groupby_string_1c_nonull.key SCRIPT [] +POSTHOOK: Lineage: groupby_string_1c_nonull.s_date EXPRESSION [] +POSTHOOK: Lineage: groupby_string_1c_nonull.s_timestamp EXPRESSION [] +PREHOOK: query: insert into groupby_string_1c_nonull values ('FFF', '880-11-01', '22073-03-21 15:32:57.617920888') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@groupby_string_1c_nonull +POSTHOOK: query: insert into groupby_string_1c_nonull values ('FFF', '880-11-01', '22073-03-21 15:32:57.617920888') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@groupby_string_1c_nonull +POSTHOOK: Lineage: groupby_string_1c_nonull.key SCRIPT [] +POSTHOOK: Lineage: groupby_string_1c_nonull.s_date SCRIPT [] +POSTHOOK: Lineage: groupby_string_1c_nonull.s_timestamp SCRIPT [] +PREHOOK: query: insert into groupby_string_1c_nonull values ('GGG', '2018-04-15', NULL) +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@groupby_string_1c_nonull +POSTHOOK: query: insert into groupby_string_1c_nonull values ('GGG', '2018-04-15', NULL) +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@groupby_string_1c_nonull +POSTHOOK: Lineage: groupby_string_1c_nonull.key SCRIPT [] +POSTHOOK: Lineage: groupby_string_1c_nonull.s_date SCRIPT [] +POSTHOOK: Lineage: groupby_string_1c_nonull.s_timestamp EXPRESSION [] +PREHOOK: query: explain vectorization operator +select key, count(key) from groupby_string_1a group by key +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization operator +select key, count(key) from groupby_string_1a group by key +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: groupby_string_1a + Statistics: Num rows: 19 Data size: 3496 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Select Operator + expressions: key (type: string) + outputColumnNames: key + Select Vectorization: + className: VectorSelectOperator + native: true + Statistics: Num rows: 19 Data size: 3496 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: count(key) + Group By Vectorization: + className: VectorGroupByHashStringKeySingleCountKeyOperator + groupByMode: HASH + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true + vectorProcessingMode: HASH + keys: key (type: string) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 19 Data size: 3496 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: 
+ + Map-reduce partition columns: _col0 (type: string) + Reduce Sink Vectorization: + className: VectorReduceSinkStringOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 19 Data size: 3496 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: bigint) + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [] + featureSupportInUse: [] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + Reducer 2 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Operator Tree: + Group By Operator + aggregations: count(VALUE._col0) + Group By Vectorization: + className: VectorGroupByOperator + groupByMode: MERGEPARTIAL + native: false + vectorProcessingMode: MERGE_PARTIAL + keys: KEY._col0 (type: string) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 9 Data size: 1656 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 9 Data size: 1656 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select key, count(key) from groupby_string_1a group by key +PREHOOK: type: QUERY +PREHOOK: Input: default@groupby_string_1a +#### A masked pattern was here #### +POSTHOOK: query: select key, count(key) from groupby_string_1a group by key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@groupby_string_1a +#### A masked pattern was here #### +FTWURVH 1 +MXGDMBD 1 +NOT 2 +NULL 0 +PXLD 3 +QNCYBDW 3 +UA 1 +WXHJ 5 +PREHOOK: query: select key, count(key) from groupby_string_1a where key != 'PXLD' group by key +PREHOOK: type: QUERY +PREHOOK: Input: default@groupby_string_1a +#### A masked pattern was here #### +POSTHOOK: query: select key, count(key) from groupby_string_1a where key != 'PXLD' group by key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@groupby_string_1a +#### A masked pattern was here #### +FTWURVH 1 +MXGDMBD 1 +NOT 2 +QNCYBDW 3 +UA 1 +WXHJ 5 +PREHOOK: query: explain vectorization operator +select key, count(*) from groupby_string_1a group by key +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization operator +select key, count(*) from groupby_string_1a group by key +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + 
Map 1 + Map Operator Tree: + TableScan + alias: groupby_string_1a + Statistics: Num rows: 19 Data size: 3496 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Select Operator + expressions: key (type: string) + outputColumnNames: key + Select Vectorization: + className: VectorSelectOperator + native: true + Statistics: Num rows: 19 Data size: 3496 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: count() + Group By Vectorization: + className: VectorGroupByHashStringKeySingleCountStarOperator + groupByMode: HASH + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true + vectorProcessingMode: HASH + keys: key (type: string) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 19 Data size: 3496 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Reduce Sink Vectorization: + className: VectorReduceSinkStringOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 19 Data size: 3496 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: bigint) + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [] + featureSupportInUse: [] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + Reducer 2 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Operator Tree: + Group By Operator + aggregations: count(VALUE._col0) + Group By Vectorization: + className: VectorGroupByOperator + groupByMode: MERGEPARTIAL + native: false + vectorProcessingMode: MERGE_PARTIAL + keys: KEY._col0 (type: string) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 9 Data size: 1656 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 9 Data size: 1656 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select key, count(*) from groupby_string_1a group by key +PREHOOK: type: QUERY +PREHOOK: Input: default@groupby_string_1a +#### A masked pattern was here #### +POSTHOOK: query: select key, count(*) from groupby_string_1a group by key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@groupby_string_1a +#### A masked pattern was here #### +FTWURVH 1 +MXGDMBD 
1 +NOT 2 +NULL 3 +PXLD 3 +QNCYBDW 3 +UA 1 +WXHJ 5 +PREHOOK: query: select key, count(*) from groupby_string_1a where key != 'PXLD' group by key +PREHOOK: type: QUERY +PREHOOK: Input: default@groupby_string_1a +#### A masked pattern was here #### +POSTHOOK: query: select key, count(*) from groupby_string_1a where key != 'PXLD' group by key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@groupby_string_1a +#### A masked pattern was here #### +FTWURVH 1 +MXGDMBD 1 +NOT 2 +QNCYBDW 3 +UA 1 +WXHJ 5 +PREHOOK: query: select key, count(key) from groupby_string_1a_nonull group by key +PREHOOK: type: QUERY +PREHOOK: Input: default@groupby_string_1a_nonull +#### A masked pattern was here #### +POSTHOOK: query: select key, count(key) from groupby_string_1a_nonull group by key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@groupby_string_1a_nonull +#### A masked pattern was here #### +AA 1 +FTWURVH 1 +MXGDMBD 1 +PXLD 4 +QNCYBDW 1 +UA 1 +WXHJ 5 +PREHOOK: query: select key, count(key) from groupby_string_1a_nonull where key != 'MXGDMBD' group by key +PREHOOK: type: QUERY +PREHOOK: Input: default@groupby_string_1a_nonull +#### A masked pattern was here #### +POSTHOOK: query: select key, count(key) from groupby_string_1a_nonull where key != 'MXGDMBD' group by key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@groupby_string_1a_nonull +#### A masked pattern was here #### +AA 1 +FTWURVH 1 +PXLD 4 +QNCYBDW 1 +UA 1 +WXHJ 5 +PREHOOK: query: select key, count(*) from groupby_string_1a_nonull group by key +PREHOOK: type: QUERY +PREHOOK: Input: default@groupby_string_1a_nonull +#### A masked pattern was here #### +POSTHOOK: query: select key, count(*) from groupby_string_1a_nonull group by key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@groupby_string_1a_nonull +#### A masked pattern was here #### +AA 1 +FTWURVH 1 +MXGDMBD 1 +PXLD 4 +QNCYBDW 1 +UA 1 +WXHJ 5 +PREHOOK: query: select key, count(*) from groupby_string_1a_nonull where key != 'MXGDMBD' group by key +PREHOOK: type: QUERY +PREHOOK: Input: default@groupby_string_1a_nonull +#### A masked pattern was here #### +POSTHOOK: query: select key, count(*) from groupby_string_1a_nonull where key != 'MXGDMBD' group by key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@groupby_string_1a_nonull +#### A masked pattern was here #### +AA 1 +FTWURVH 1 +PXLD 4 +QNCYBDW 1 +UA 1 +WXHJ 5 +PREHOOK: query: explain vectorization operator +select key, count(key) from groupby_string_1b group by key +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization operator +select key, count(key) from groupby_string_1b group by key +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: groupby_string_1b + Statistics: Num rows: 13 Data size: 1144 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Select Operator + expressions: key (type: char(4)) + outputColumnNames: key + Select Vectorization: + className: VectorSelectOperator + native: true + Statistics: Num rows: 13 Data size: 1144 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: count(key) + Group By Vectorization: + className: VectorGroupByHashStringKeySingleCountKeyOperator + groupByMode: 
HASH + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true + vectorProcessingMode: HASH + keys: key (type: char(4)) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 13 Data size: 1144 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: char(4)) + sort order: + + Map-reduce partition columns: _col0 (type: char(4)) + Reduce Sink Vectorization: + className: VectorReduceSinkStringOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 13 Data size: 1144 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: bigint) + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [] + featureSupportInUse: [] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + Reducer 2 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Operator Tree: + Group By Operator + aggregations: count(VALUE._col0) + Group By Vectorization: + className: VectorGroupByOperator + groupByMode: MERGEPARTIAL + native: false + vectorProcessingMode: MERGE_PARTIAL + keys: KEY._col0 (type: char(4)) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 6 Data size: 528 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 6 Data size: 528 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select key, count(key) from groupby_string_1b group by key +PREHOOK: type: QUERY +PREHOOK: Input: default@groupby_string_1b +#### A masked pattern was here #### +POSTHOOK: query: select key, count(key) from groupby_string_1b group by key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@groupby_string_1b +#### A masked pattern was here #### +FTWU 1 +MXGD 1 +NULL 0 +PXLD 3 +QNCY 1 +UA 1 +WXHJ 5 +PREHOOK: query: select key, count(key) from groupby_string_1b where key != 'MXGD' group by key +PREHOOK: type: QUERY +PREHOOK: Input: default@groupby_string_1b +#### A masked pattern was here #### +POSTHOOK: query: select key, count(key) from groupby_string_1b where key != 'MXGD' group by key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@groupby_string_1b +#### A masked pattern was here #### +FTWU 1 +PXLD 3 +QNCY 1 +UA 1 +WXHJ 5 +PREHOOK: query: explain vectorization operator +select key, count(*) from groupby_string_1b 
group by key +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization operator +select key, count(*) from groupby_string_1b group by key +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: groupby_string_1b + Statistics: Num rows: 13 Data size: 1144 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Select Operator + expressions: key (type: char(4)) + outputColumnNames: key + Select Vectorization: + className: VectorSelectOperator + native: true + Statistics: Num rows: 13 Data size: 1144 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: count() + Group By Vectorization: + className: VectorGroupByHashStringKeySingleCountStarOperator + groupByMode: HASH + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true + vectorProcessingMode: HASH + keys: key (type: char(4)) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 13 Data size: 1144 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: char(4)) + sort order: + + Map-reduce partition columns: _col0 (type: char(4)) + Reduce Sink Vectorization: + className: VectorReduceSinkStringOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 13 Data size: 1144 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: bigint) + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [] + featureSupportInUse: [] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + Reducer 2 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Operator Tree: + Group By Operator + aggregations: count(VALUE._col0) + Group By Vectorization: + className: VectorGroupByOperator + groupByMode: MERGEPARTIAL + native: false + vectorProcessingMode: MERGE_PARTIAL + keys: KEY._col0 (type: char(4)) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 6 Data size: 528 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 6 Data size: 528 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select key, count(*) from groupby_string_1b group by key +PREHOOK: type: QUERY +PREHOOK: Input: default@groupby_string_1b +#### A masked pattern was here #### +POSTHOOK: query: select key, count(*) from groupby_string_1b group by key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@groupby_string_1b +#### A masked pattern was here #### +FTWU 1 +MXGD 1 +NULL 1 +PXLD 3 +QNCY 1 +UA 1 +WXHJ 5 +PREHOOK: query: select key, count(*) from groupby_string_1b where key != 'MXGD' group by key +PREHOOK: type: QUERY +PREHOOK: Input: default@groupby_string_1b +#### A masked pattern was here #### +POSTHOOK: query: select key, count(*) from groupby_string_1b where key != 'MXGD' group by key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@groupby_string_1b +#### A masked pattern was here #### +FTWU 1 +PXLD 3 +QNCY 1 +UA 1 +WXHJ 5 +PREHOOK: query: select key, count(key) from groupby_string_1b_nonull group by key +PREHOOK: type: QUERY +PREHOOK: Input: default@groupby_string_1b_nonull +#### A masked pattern was here #### +POSTHOOK: query: select key, count(key) from groupby_string_1b_nonull group by key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@groupby_string_1b_nonull +#### A masked pattern was here #### +AA 1 +FTWU 1 +MXGD 1 +PXLD 4 +QNCY 1 +UA 1 +WXHJ 5 +PREHOOK: query: select key, count(key) from groupby_string_1b_nonull where key != 'MXGD' group by key +PREHOOK: type: QUERY +PREHOOK: Input: default@groupby_string_1b_nonull +#### A masked pattern was here #### +POSTHOOK: query: select key, count(key) from groupby_string_1b_nonull where key != 'MXGD' group by key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@groupby_string_1b_nonull +#### A masked pattern was here #### +AA 1 +FTWU 1 +PXLD 4 +QNCY 1 +UA 1 +WXHJ 5 +PREHOOK: query: select key, count(*) from groupby_string_1b_nonull group by key +PREHOOK: type: QUERY +PREHOOK: Input: default@groupby_string_1b_nonull +#### A masked pattern was here #### +POSTHOOK: query: select key, count(*) from groupby_string_1b_nonull group by key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@groupby_string_1b_nonull +#### A masked pattern was here #### +AA 1 +FTWU 1 +MXGD 1 +PXLD 4 +QNCY 1 +UA 1 +WXHJ 5 +PREHOOK: query: select key, count(*) from groupby_string_1b_nonull where key != 'MXGD' group by key +PREHOOK: type: QUERY +PREHOOK: Input: default@groupby_string_1b_nonull +#### A masked pattern was here #### +POSTHOOK: query: select key, count(*) from groupby_string_1b_nonull where key != 'MXGD' group by key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@groupby_string_1b_nonull +#### A masked pattern was here #### +AA 1 +FTWU 1 +PXLD 4 +QNCY 1 +UA 1 +WXHJ 5 +PREHOOK: query: explain vectorization operator +select key, count(key) from groupby_string_1c group by key +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization operator +select key, count(key) from groupby_string_1c group by key +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: groupby_string_1c + Statistics: 
Num rows: 47 Data size: 8464 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Select Operator + expressions: key (type: string) + outputColumnNames: key + Select Vectorization: + className: VectorSelectOperator + native: true + Statistics: Num rows: 47 Data size: 8464 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: count(key) + Group By Vectorization: + className: VectorGroupByHashStringKeySingleCountKeyOperator + groupByMode: HASH + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true + vectorProcessingMode: HASH + keys: key (type: string) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 47 Data size: 8464 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Reduce Sink Vectorization: + className: VectorReduceSinkStringOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 47 Data size: 8464 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: bigint) + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [] + featureSupportInUse: [] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + Reducer 2 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Operator Tree: + Group By Operator + aggregations: count(VALUE._col0) + Group By Vectorization: + className: VectorGroupByOperator + groupByMode: MERGEPARTIAL + native: false + vectorProcessingMode: MERGE_PARTIAL + keys: KEY._col0 (type: string) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 23 Data size: 4141 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 23 Data size: 4141 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select key, count(key) from groupby_string_1c group by key +PREHOOK: type: QUERY +PREHOOK: Input: default@groupby_string_1c +#### A masked pattern was here #### +POSTHOOK: query: select key, count(key) from groupby_string_1c group by key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@groupby_string_1c +#### A masked pattern was here #### + 2 +AARNZRVZQ 2 +ATZJTPECF 5 +BB 1 +BDBMW 1 +BEP 2 +CC 1 +CQMTQLI 2 +DD 1 +FROPIK 3 
+FTWURVH 1 +FYW 1 +GOYJHW 3 +GSJPSIYOU 1 +IOQIDQBHU 1 +IWEZJHKE 1 +KL 1 +LOTLS 3 +MXGDMBD 1 +NADANUQMW 1 +NULL 0 +QTSRKSKB 1 +SDA 1 +VNRXWQ 2 +WNGFTTY 2 +ZNOUDCR 1 +PREHOOK: query: select key, count(key) from groupby_string_1c where key != 'IWEZJHKE' group by key +PREHOOK: type: QUERY +PREHOOK: Input: default@groupby_string_1c +#### A masked pattern was here #### +POSTHOOK: query: select key, count(key) from groupby_string_1c where key != 'IWEZJHKE' group by key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@groupby_string_1c +#### A masked pattern was here #### + 2 +AARNZRVZQ 2 +ATZJTPECF 5 +BB 1 +BDBMW 1 +BEP 2 +CC 1 +CQMTQLI 2 +DD 1 +FROPIK 3 +FTWURVH 1 +FYW 1 +GOYJHW 3 +GSJPSIYOU 1 +IOQIDQBHU 1 +KL 1 +LOTLS 3 +MXGDMBD 1 +NADANUQMW 1 +QTSRKSKB 1 +SDA 1 +VNRXWQ 2 +WNGFTTY 2 +ZNOUDCR 1 +PREHOOK: query: explain vectorization operator +select key, count(*) from groupby_string_1c group by key +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization operator +select key, count(*) from groupby_string_1c group by key +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: groupby_string_1c + Statistics: Num rows: 47 Data size: 8464 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Select Operator + expressions: key (type: string) + outputColumnNames: key + Select Vectorization: + className: VectorSelectOperator + native: true + Statistics: Num rows: 47 Data size: 8464 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: count() + Group By Vectorization: + className: VectorGroupByHashStringKeySingleCountStarOperator + groupByMode: HASH + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true + vectorProcessingMode: HASH + keys: key (type: string) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 47 Data size: 8464 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Reduce Sink Vectorization: + className: VectorReduceSinkStringOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 47 Data size: 8464 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: bigint) + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [] + featureSupportInUse: [] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + Reducer 2 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: 
hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Operator Tree: + Group By Operator + aggregations: count(VALUE._col0) + Group By Vectorization: + className: VectorGroupByOperator + groupByMode: MERGEPARTIAL + native: false + vectorProcessingMode: MERGE_PARTIAL + keys: KEY._col0 (type: string) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 23 Data size: 4141 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 23 Data size: 4141 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select key, count(*) from groupby_string_1c group by key +PREHOOK: type: QUERY +PREHOOK: Input: default@groupby_string_1c +#### A masked pattern was here #### +POSTHOOK: query: select key, count(*) from groupby_string_1c group by key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@groupby_string_1c +#### A masked pattern was here #### + 2 +AARNZRVZQ 2 +ATZJTPECF 5 +BB 1 +BDBMW 1 +BEP 2 +CC 1 +CQMTQLI 2 +DD 1 +FROPIK 3 +FTWURVH 1 +FYW 1 +GOYJHW 3 +GSJPSIYOU 1 +IOQIDQBHU 1 +IWEZJHKE 1 +KL 1 +LOTLS 3 +MXGDMBD 1 +NADANUQMW 1 +NULL 6 +QTSRKSKB 1 +SDA 1 +VNRXWQ 2 +WNGFTTY 2 +ZNOUDCR 1 +PREHOOK: query: select key, count(*) from groupby_string_1c where key != 'IWEZJHKE' group by key +PREHOOK: type: QUERY +PREHOOK: Input: default@groupby_string_1c +#### A masked pattern was here #### +POSTHOOK: query: select key, count(*) from groupby_string_1c where key != 'IWEZJHKE' group by key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@groupby_string_1c +#### A masked pattern was here #### + 2 +AARNZRVZQ 2 +ATZJTPECF 5 +BB 1 +BDBMW 1 +BEP 2 +CC 1 +CQMTQLI 2 +DD 1 +FROPIK 3 +FTWURVH 1 +FYW 1 +GOYJHW 3 +GSJPSIYOU 1 +IOQIDQBHU 1 +KL 1 +LOTLS 3 +MXGDMBD 1 +NADANUQMW 1 +QTSRKSKB 1 +SDA 1 +VNRXWQ 2 +WNGFTTY 2 +ZNOUDCR 1 +PREHOOK: query: explain vectorization operator +select key, count(s_date) from groupby_string_1c group by key +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization operator +select key, count(s_date) from groupby_string_1c group by key +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: groupby_string_1c + Statistics: Num rows: 47 Data size: 11040 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Select Operator + expressions: key (type: string), s_date (type: date) + outputColumnNames: key, s_date + Select Vectorization: + className: VectorSelectOperator + native: true + Statistics: Num rows: 47 Data size: 11040 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: count(s_date) + Group By Vectorization: + className: VectorGroupByHashStringKeySingleCountColumnOperator + groupByMode: HASH + native: true 
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true + vectorProcessingMode: HASH + keys: key (type: string) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 47 Data size: 11040 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Reduce Sink Vectorization: + className: VectorReduceSinkStringOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 47 Data size: 11040 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: bigint) + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [] + featureSupportInUse: [] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + Reducer 2 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Operator Tree: + Group By Operator + aggregations: count(VALUE._col0) + Group By Vectorization: + className: VectorGroupByOperator + groupByMode: MERGEPARTIAL + native: false + vectorProcessingMode: MERGE_PARTIAL + keys: KEY._col0 (type: string) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 23 Data size: 5402 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 23 Data size: 5402 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select key, count(s_date) from groupby_string_1c group by key +PREHOOK: type: QUERY +PREHOOK: Input: default@groupby_string_1c +#### A masked pattern was here #### +POSTHOOK: query: select key, count(s_date) from groupby_string_1c group by key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@groupby_string_1c +#### A masked pattern was here #### + 2 +AARNZRVZQ 2 +ATZJTPECF 4 +BB 0 +BDBMW 1 +BEP 2 +CC 1 +CQMTQLI 2 +DD 1 +FROPIK 3 +FTWURVH 1 +FYW 1 +GOYJHW 3 +GSJPSIYOU 1 +IOQIDQBHU 1 +IWEZJHKE 0 +KL 1 +LOTLS 3 +MXGDMBD 1 +NADANUQMW 1 +NULL 5 +QTSRKSKB 1 +SDA 1 +VNRXWQ 2 +WNGFTTY 2 +ZNOUDCR 0 +PREHOOK: query: select key, count(s_date) from groupby_string_1c where key != 'IWEZJHKE' group by key +PREHOOK: type: QUERY +PREHOOK: Input: default@groupby_string_1c +#### A masked pattern was here #### +POSTHOOK: query: select key, count(s_date) from groupby_string_1c where key != 'IWEZJHKE' group by key +POSTHOOK: type: QUERY +POSTHOOK: Input: 
default@groupby_string_1c +#### A masked pattern was here #### + 2 +AARNZRVZQ 2 +ATZJTPECF 4 +BB 0 +BDBMW 1 +BEP 2 +CC 1 +CQMTQLI 2 +DD 1 +FROPIK 3 +FTWURVH 1 +FYW 1 +GOYJHW 3 +GSJPSIYOU 1 +IOQIDQBHU 1 +KL 1 +LOTLS 3 +MXGDMBD 1 +NADANUQMW 1 +QTSRKSKB 1 +SDA 1 +VNRXWQ 2 +WNGFTTY 2 +ZNOUDCR 0 +PREHOOK: query: explain vectorization operator +select key, count(s_timestamp) from groupby_string_1c group by key +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization operator +select key, count(s_timestamp) from groupby_string_1c group by key +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: groupby_string_1c + Statistics: Num rows: 47 Data size: 10304 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Select Operator + expressions: key (type: string), s_timestamp (type: timestamp) + outputColumnNames: key, s_timestamp + Select Vectorization: + className: VectorSelectOperator + native: true + Statistics: Num rows: 47 Data size: 10304 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: count(s_timestamp) + Group By Vectorization: + className: VectorGroupByHashStringKeySingleCountColumnOperator + groupByMode: HASH + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true + vectorProcessingMode: HASH + keys: key (type: string) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 47 Data size: 10304 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Reduce Sink Vectorization: + className: VectorReduceSinkStringOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 47 Data size: 10304 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: bigint) + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [] + featureSupportInUse: [] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + Reducer 2 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Operator Tree: + Group By Operator + aggregations: count(VALUE._col0) + Group By Vectorization: + className: VectorGroupByOperator + groupByMode: MERGEPARTIAL + native: false + vectorProcessingMode: MERGE_PARTIAL + keys: KEY._col0 (type: string) + mode: 
mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 23 Data size: 5042 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 23 Data size: 5042 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select key, count(s_timestamp) from groupby_string_1c group by key +PREHOOK: type: QUERY +PREHOOK: Input: default@groupby_string_1c +#### A masked pattern was here #### +POSTHOOK: query: select key, count(s_timestamp) from groupby_string_1c group by key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@groupby_string_1c +#### A masked pattern was here #### + 2 +AARNZRVZQ 2 +ATZJTPECF 3 +BB 0 +BDBMW 1 +BEP 2 +CC 1 +CQMTQLI 2 +DD 0 +FROPIK 3 +FTWURVH 1 +FYW 1 +GOYJHW 2 +GSJPSIYOU 1 +IOQIDQBHU 1 +IWEZJHKE 0 +KL 1 +LOTLS 3 +MXGDMBD 1 +NADANUQMW 1 +NULL 4 +QTSRKSKB 1 +SDA 1 +VNRXWQ 2 +WNGFTTY 2 +ZNOUDCR 1 +PREHOOK: query: select key, count(s_timestamp) from groupby_string_1c where key != 'IWEZJHKE' group by key +PREHOOK: type: QUERY +PREHOOK: Input: default@groupby_string_1c +#### A masked pattern was here #### +POSTHOOK: query: select key, count(s_timestamp) from groupby_string_1c where key != 'IWEZJHKE' group by key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@groupby_string_1c +#### A masked pattern was here #### + 2 +AARNZRVZQ 2 +ATZJTPECF 3 +BB 0 +BDBMW 1 +BEP 2 +CC 1 +CQMTQLI 2 +DD 0 +FROPIK 3 +FTWURVH 1 +FYW 1 +GOYJHW 2 +GSJPSIYOU 1 +IOQIDQBHU 1 +KL 1 +LOTLS 3 +MXGDMBD 1 +NADANUQMW 1 +QTSRKSKB 1 +SDA 1 +VNRXWQ 2 +WNGFTTY 2 +ZNOUDCR 1 +PREHOOK: query: select key, count(key) from groupby_string_1c_nonull group by key +PREHOOK: type: QUERY +PREHOOK: Input: default@groupby_string_1c_nonull +#### A masked pattern was here #### +POSTHOOK: query: select key, count(key) from groupby_string_1c_nonull group by key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@groupby_string_1c_nonull +#### A masked pattern was here #### + 2 +AARNZRVZQ 2 +ATZJTPECF 2 +BDBMW 1 +BEP 2 +CQMTQLI 2 +EEE 1 +FFF 1 +FROPIK 3 +FTWURVH 1 +FYW 1 +GGG 1 +GOYJHW 3 +GSJPSIYOU 1 +IOQIDQBHU 1 +IWEZJHKE 1 +KL 1 +LOTLS 3 +MXGDMBD 1 +NADANUQMW 1 +QTSRKSKB 1 +SDA 4 +VNRXWQ 2 +WNGFTTY 2 +ZNOUDCR 1 +PREHOOK: query: select key, count(key) from groupby_string_1c_nonull where key != 'IWEZJHKE' group by key +PREHOOK: type: QUERY +PREHOOK: Input: default@groupby_string_1c_nonull +#### A masked pattern was here #### +POSTHOOK: query: select key, count(key) from groupby_string_1c_nonull where key != 'IWEZJHKE' group by key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@groupby_string_1c_nonull +#### A masked pattern was here #### + 2 +AARNZRVZQ 2 +ATZJTPECF 2 +BDBMW 1 +BEP 2 +CQMTQLI 2 +EEE 1 +FFF 1 +FROPIK 3 +FTWURVH 1 +FYW 1 +GGG 1 +GOYJHW 3 +GSJPSIYOU 1 +IOQIDQBHU 1 +KL 1 +LOTLS 3 +MXGDMBD 1 +NADANUQMW 1 +QTSRKSKB 1 +SDA 4 +VNRXWQ 2 +WNGFTTY 2 +ZNOUDCR 1 +PREHOOK: query: select key, count(*) from groupby_string_1c_nonull group by key +PREHOOK: type: QUERY +PREHOOK: Input: default@groupby_string_1c_nonull +#### A masked pattern was here #### +POSTHOOK: query: select key, count(*) from groupby_string_1c_nonull group by key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@groupby_string_1c_nonull +#### A 
masked pattern was here #### + 2 +AARNZRVZQ 2 +ATZJTPECF 2 +BDBMW 1 +BEP 2 +CQMTQLI 2 +EEE 1 +FFF 1 +FROPIK 3 +FTWURVH 1 +FYW 1 +GGG 1 +GOYJHW 3 +GSJPSIYOU 1 +IOQIDQBHU 1 +IWEZJHKE 1 +KL 1 +LOTLS 3 +MXGDMBD 1 +NADANUQMW 1 +QTSRKSKB 1 +SDA 4 +VNRXWQ 2 +WNGFTTY 2 +ZNOUDCR 1 +PREHOOK: query: select key, count(*) from groupby_string_1c_nonull where key != 'IWEZJHKE' group by key +PREHOOK: type: QUERY +PREHOOK: Input: default@groupby_string_1c_nonull +#### A masked pattern was here #### +POSTHOOK: query: select key, count(*) from groupby_string_1c_nonull where key != 'IWEZJHKE' group by key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@groupby_string_1c_nonull +#### A masked pattern was here #### + 2 +AARNZRVZQ 2 +ATZJTPECF 2 +BDBMW 1 +BEP 2 +CQMTQLI 2 +EEE 1 +FFF 1 +FROPIK 3 +FTWURVH 1 +FYW 1 +GGG 1 +GOYJHW 3 +GSJPSIYOU 1 +IOQIDQBHU 1 +KL 1 +LOTLS 3 +MXGDMBD 1 +NADANUQMW 1 +QTSRKSKB 1 +SDA 4 +VNRXWQ 2 +WNGFTTY 2 +ZNOUDCR 1 +PREHOOK: query: select key, count(s_date) from groupby_string_1c_nonull group by key +PREHOOK: type: QUERY +PREHOOK: Input: default@groupby_string_1c_nonull +#### A masked pattern was here #### +POSTHOOK: query: select key, count(s_date) from groupby_string_1c_nonull group by key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@groupby_string_1c_nonull +#### A masked pattern was here #### + 2 +AARNZRVZQ 2 +ATZJTPECF 2 +BDBMW 1 +BEP 2 +CQMTQLI 2 +EEE 0 +FFF 0 +FROPIK 3 +FTWURVH 1 +FYW 1 +GGG 1 +GOYJHW 3 +GSJPSIYOU 1 +IOQIDQBHU 1 +IWEZJHKE 0 +KL 1 +LOTLS 3 +MXGDMBD 1 +NADANUQMW 1 +QTSRKSKB 1 +SDA 3 +VNRXWQ 2 +WNGFTTY 2 +ZNOUDCR 0 +PREHOOK: query: select key, count(s_date) from groupby_string_1c_nonull where key != 'IWEZJHKE' group by key +PREHOOK: type: QUERY +PREHOOK: Input: default@groupby_string_1c_nonull +#### A masked pattern was here #### +POSTHOOK: query: select key, count(s_date) from groupby_string_1c_nonull where key != 'IWEZJHKE' group by key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@groupby_string_1c_nonull +#### A masked pattern was here #### + 2 +AARNZRVZQ 2 +ATZJTPECF 2 +BDBMW 1 +BEP 2 +CQMTQLI 2 +EEE 0 +FFF 0 +FROPIK 3 +FTWURVH 1 +FYW 1 +GGG 1 +GOYJHW 3 +GSJPSIYOU 1 +IOQIDQBHU 1 +KL 1 +LOTLS 3 +MXGDMBD 1 +NADANUQMW 1 +QTSRKSKB 1 +SDA 3 +VNRXWQ 2 +WNGFTTY 2 +ZNOUDCR 0 +PREHOOK: query: select key, count(s_timestamp) from groupby_string_1c_nonull group by key +PREHOOK: type: QUERY +PREHOOK: Input: default@groupby_string_1c_nonull +#### A masked pattern was here #### +POSTHOOK: query: select key, count(s_timestamp) from groupby_string_1c_nonull group by key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@groupby_string_1c_nonull +#### A masked pattern was here #### + 2 +AARNZRVZQ 2 +ATZJTPECF 2 +BDBMW 1 +BEP 2 +CQMTQLI 2 +EEE 0 +FFF 0 +FROPIK 3 +FTWURVH 1 +FYW 1 +GGG 0 +GOYJHW 2 +GSJPSIYOU 1 +IOQIDQBHU 1 +IWEZJHKE 0 +KL 1 +LOTLS 3 +MXGDMBD 1 +NADANUQMW 1 +QTSRKSKB 1 +SDA 2 +VNRXWQ 2 +WNGFTTY 2 +ZNOUDCR 1 +PREHOOK: query: select key, count(s_timestamp) from groupby_string_1c_nonull where key != 'IWEZJHKE' group by key +PREHOOK: type: QUERY +PREHOOK: Input: default@groupby_string_1c_nonull +#### A masked pattern was here #### +POSTHOOK: query: select key, count(s_timestamp) from groupby_string_1c_nonull where key != 'IWEZJHKE' group by key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@groupby_string_1c_nonull +#### A masked pattern was here #### + 2 +AARNZRVZQ 2 +ATZJTPECF 2 +BDBMW 1 +BEP 2 +CQMTQLI 2 +EEE 0 +FFF 0 +FROPIK 3 +FTWURVH 1 +FYW 1 +GGG 0 +GOYJHW 2 +GSJPSIYOU 1 +IOQIDQBHU 1 +KL 1 +LOTLS 3 +MXGDMBD 1 +NADANUQMW 1 +QTSRKSKB 1 +SDA 2 +VNRXWQ 2 
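
-- Editorial note on the result sets above, not part of the golden output:
-- count(<col>) skips rows where that column is NULL, while count(*) counts
-- every row in the group, which is why the NULL-keyed groups report 0 for
-- count(key) but a positive value for count(*). The plans above pick one of
-- three native specializations accordingly:
-- VectorGroupByHashStringKeySingleCountKeyOperator (count of the key itself),
-- ...CountStarOperator (count(*)), and ...CountColumnOperator (count of a
-- non-key column). A minimal illustration; table t and its values are
-- hypothetical, not from this test:
CREATE TABLE t(k string);
INSERT INTO t VALUES ('a'), ('a'), (NULL);
SELECT k, count(k), count(*) FROM t GROUP BY k;
-- expected: a     2 2
--           NULL  0 1
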
+WNGFTTY 2 +ZNOUDCR 1 +PREHOOK: query: CREATE TABLE groupby_serialize_1a_txt(key timestamp) +row format delimited fields terminated by ',' +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@groupby_serialize_1a_txt +POSTHOOK: query: CREATE TABLE groupby_serialize_1a_txt(key timestamp) +row format delimited fields terminated by ',' +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@groupby_serialize_1a_txt +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/groupby_serialize_1a.txt' OVERWRITE INTO TABLE groupby_serialize_1a_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@groupby_serialize_1a_txt +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/groupby_serialize_1a.txt' OVERWRITE INTO TABLE groupby_serialize_1a_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@groupby_serialize_1a_txt +PREHOOK: query: CREATE TABLE groupby_serialize_1a STORED AS ORC AS SELECT * FROM groupby_serialize_1a_txt +PREHOOK: type: CREATETABLE_AS_SELECT +PREHOOK: Input: default@groupby_serialize_1a_txt +PREHOOK: Output: database:default +PREHOOK: Output: default@groupby_serialize_1a +POSTHOOK: query: CREATE TABLE groupby_serialize_1a STORED AS ORC AS SELECT * FROM groupby_serialize_1a_txt +POSTHOOK: type: CREATETABLE_AS_SELECT +POSTHOOK: Input: default@groupby_serialize_1a_txt +POSTHOOK: Output: database:default +POSTHOOK: Output: default@groupby_serialize_1a +POSTHOOK: Lineage: groupby_serialize_1a.key SIMPLE [(groupby_serialize_1a_txt)groupby_serialize_1a_txt.FieldSchema(name:key, type:timestamp, comment:null), ] +PREHOOK: query: CREATE TABLE groupby_serialize_1a_nonull_txt(key timestamp) +row format delimited fields terminated by ',' +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@groupby_serialize_1a_nonull_txt +POSTHOOK: query: CREATE TABLE groupby_serialize_1a_nonull_txt(key timestamp) +row format delimited fields terminated by ',' +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@groupby_serialize_1a_nonull_txt +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/groupby_serialize_1a_nonull.txt' OVERWRITE INTO TABLE groupby_serialize_1a_nonull_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@groupby_serialize_1a_nonull_txt +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/groupby_serialize_1a_nonull.txt' OVERWRITE INTO TABLE groupby_serialize_1a_nonull_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@groupby_serialize_1a_nonull_txt +PREHOOK: query: CREATE TABLE groupby_serialize_1a_nonull STORED AS ORC AS SELECT * FROM groupby_serialize_1a_nonull_txt +PREHOOK: type: CREATETABLE_AS_SELECT +PREHOOK: Input: default@groupby_serialize_1a_nonull_txt +PREHOOK: Output: database:default +PREHOOK: Output: default@groupby_serialize_1a_nonull +POSTHOOK: query: CREATE TABLE groupby_serialize_1a_nonull STORED AS ORC AS SELECT * FROM groupby_serialize_1a_nonull_txt +POSTHOOK: type: CREATETABLE_AS_SELECT +POSTHOOK: Input: default@groupby_serialize_1a_nonull_txt +POSTHOOK: Output: database:default +POSTHOOK: Output: default@groupby_serialize_1a_nonull +POSTHOOK: Lineage: groupby_serialize_1a_nonull.key SIMPLE [(groupby_serialize_1a_nonull_txt)groupby_serialize_1a_nonull_txt.FieldSchema(name:key, type:timestamp, comment:null), ] +PREHOOK: query: CREATE TABLE 
groupby_serialize_1b_txt(key timestamp, c_smallint smallint, c_string string, c_double double) +row format delimited fields terminated by ',' +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@groupby_serialize_1b_txt +POSTHOOK: query: CREATE TABLE groupby_serialize_1b_txt(key timestamp, c_smallint smallint, c_string string, c_double double) +row format delimited fields terminated by ',' +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@groupby_serialize_1b_txt +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/groupby_serialize_1b.txt' OVERWRITE INTO TABLE groupby_serialize_1b_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@groupby_serialize_1b_txt +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/groupby_serialize_1b.txt' OVERWRITE INTO TABLE groupby_serialize_1b_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@groupby_serialize_1b_txt +PREHOOK: query: CREATE TABLE groupby_serialize_1b STORED AS ORC AS SELECT * FROM groupby_serialize_1b_txt +PREHOOK: type: CREATETABLE_AS_SELECT +PREHOOK: Input: default@groupby_serialize_1b_txt +PREHOOK: Output: database:default +PREHOOK: Output: default@groupby_serialize_1b +POSTHOOK: query: CREATE TABLE groupby_serialize_1b STORED AS ORC AS SELECT * FROM groupby_serialize_1b_txt +POSTHOOK: type: CREATETABLE_AS_SELECT +POSTHOOK: Input: default@groupby_serialize_1b_txt +POSTHOOK: Output: database:default +POSTHOOK: Output: default@groupby_serialize_1b +POSTHOOK: Lineage: groupby_serialize_1b.c_double SIMPLE [(groupby_serialize_1b_txt)groupby_serialize_1b_txt.FieldSchema(name:c_double, type:double, comment:null), ] +POSTHOOK: Lineage: groupby_serialize_1b.c_smallint SIMPLE [(groupby_serialize_1b_txt)groupby_serialize_1b_txt.FieldSchema(name:c_smallint, type:smallint, comment:null), ] +POSTHOOK: Lineage: groupby_serialize_1b.c_string SIMPLE [(groupby_serialize_1b_txt)groupby_serialize_1b_txt.FieldSchema(name:c_string, type:string, comment:null), ] +POSTHOOK: Lineage: groupby_serialize_1b.key SIMPLE [(groupby_serialize_1b_txt)groupby_serialize_1b_txt.FieldSchema(name:key, type:timestamp, comment:null), ] +PREHOOK: query: CREATE TABLE groupby_serialize_1b_nonull_txt(key timestamp, c_smallint smallint, c_string string, c_double double) +row format delimited fields terminated by ',' +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@groupby_serialize_1b_nonull_txt +POSTHOOK: query: CREATE TABLE groupby_serialize_1b_nonull_txt(key timestamp, c_smallint smallint, c_string string, c_double double) +row format delimited fields terminated by ',' +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@groupby_serialize_1b_nonull_txt +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/groupby_serialize_1b_nonull.txt' OVERWRITE INTO TABLE groupby_serialize_1b_nonull_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@groupby_serialize_1b_nonull_txt +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/groupby_serialize_1b_nonull.txt' OVERWRITE INTO TABLE groupby_serialize_1b_nonull_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@groupby_serialize_1b_nonull_txt +PREHOOK: query: CREATE TABLE groupby_serialize_1b_nonull STORED AS ORC AS SELECT * FROM groupby_serialize_1b_nonull_txt +PREHOOK: type: CREATETABLE_AS_SELECT +PREHOOK: Input: 
default@groupby_serialize_1b_nonull_txt +PREHOOK: Output: database:default +PREHOOK: Output: default@groupby_serialize_1b_nonull +POSTHOOK: query: CREATE TABLE groupby_serialize_1b_nonull STORED AS ORC AS SELECT * FROM groupby_serialize_1b_nonull_txt +POSTHOOK: type: CREATETABLE_AS_SELECT +POSTHOOK: Input: default@groupby_serialize_1b_nonull_txt +POSTHOOK: Output: database:default +POSTHOOK: Output: default@groupby_serialize_1b_nonull +POSTHOOK: Lineage: groupby_serialize_1b_nonull.c_double SIMPLE [(groupby_serialize_1b_nonull_txt)groupby_serialize_1b_nonull_txt.FieldSchema(name:c_double, type:double, comment:null), ] +POSTHOOK: Lineage: groupby_serialize_1b_nonull.c_smallint SIMPLE [(groupby_serialize_1b_nonull_txt)groupby_serialize_1b_nonull_txt.FieldSchema(name:c_smallint, type:smallint, comment:null), ] +POSTHOOK: Lineage: groupby_serialize_1b_nonull.c_string SIMPLE [(groupby_serialize_1b_nonull_txt)groupby_serialize_1b_nonull_txt.FieldSchema(name:c_string, type:string, comment:null), ] +POSTHOOK: Lineage: groupby_serialize_1b_nonull.key SIMPLE [(groupby_serialize_1b_nonull_txt)groupby_serialize_1b_nonull_txt.FieldSchema(name:key, type:timestamp, comment:null), ] +PREHOOK: query: explain vectorization operator +select key, count(key) from groupby_serialize_1a group by key +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization operator +select key, count(key) from groupby_serialize_1a group by key +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: groupby_serialize_1a + Statistics: Num rows: 17 Data size: 680 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Select Operator + expressions: key (type: timestamp) + outputColumnNames: key + Select Vectorization: + className: VectorSelectOperator + native: true + Statistics: Num rows: 17 Data size: 680 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: count(key) + Group By Vectorization: + className: VectorGroupByHashSerializeKeySingleCountKeyOperator + groupByMode: HASH + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true + vectorProcessingMode: HASH + keys: key (type: timestamp) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 17 Data size: 680 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: timestamp) + sort order: + + Map-reduce partition columns: _col0 (type: timestamp) + Reduce Sink Vectorization: + className: VectorReduceSinkMultiKeyOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 17 Data size: 680 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: bigint) + Execution mode: vectorized, llap + LLAP IO: all inputs + Map 
Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [] + featureSupportInUse: [] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + Reducer 2 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Operator Tree: + Group By Operator + aggregations: count(VALUE._col0) + Group By Vectorization: + className: VectorGroupByOperator + groupByMode: MERGEPARTIAL + native: false + vectorProcessingMode: MERGE_PARTIAL + keys: KEY._col0 (type: timestamp) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 8 Data size: 320 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 8 Data size: 320 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select key, count(key) from groupby_serialize_1a group by key +PREHOOK: type: QUERY +PREHOOK: Input: default@groupby_serialize_1a +#### A masked pattern was here #### +POSTHOOK: query: select key, count(key) from groupby_serialize_1a group by key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@groupby_serialize_1a +#### A masked pattern was here #### +2061-12-19 22:10:32.000628309 1 +2082-07-14 04:00:40.695380469 1 +2093-04-10 23:36:54.846 3 +2188-06-04 15:03:14.963259704 1 +2299-11-15 16:41:30.401 1 +2306-06-21 11:02:00.143124239 2 +2608-02-23 23:44:02.546440891 1 +2686-05-23 07:46:46.565832918 2 +2898-10-01 22:27:02.000871113 1 +NULL 0 +PREHOOK: query: select key, count(key) from groupby_serialize_1a where key != '2082-07-14 04:00:40.695380469' group by key +PREHOOK: type: QUERY +PREHOOK: Input: default@groupby_serialize_1a +#### A masked pattern was here #### +POSTHOOK: query: select key, count(key) from groupby_serialize_1a where key != '2082-07-14 04:00:40.695380469' group by key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@groupby_serialize_1a +#### A masked pattern was here #### +2061-12-19 22:10:32.000628309 1 +2093-04-10 23:36:54.846 3 +2188-06-04 15:03:14.963259704 1 +2299-11-15 16:41:30.401 1 +2306-06-21 11:02:00.143124239 2 +2608-02-23 23:44:02.546440891 1 +2686-05-23 07:46:46.565832918 2 +2898-10-01 22:27:02.000871113 1 +PREHOOK: query: explain vectorization operator +select key, count(*) from groupby_serialize_1a group by key +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization operator +select key, count(*) from groupby_serialize_1a group by key +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: groupby_serialize_1a + Statistics: Num 
rows: 17 Data size: 680 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Select Operator + expressions: key (type: timestamp) + outputColumnNames: key + Select Vectorization: + className: VectorSelectOperator + native: true + Statistics: Num rows: 17 Data size: 680 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: count() + Group By Vectorization: + className: VectorGroupByHashSerializeKeySingleCountStarOperator + groupByMode: HASH + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true + vectorProcessingMode: HASH + keys: key (type: timestamp) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 17 Data size: 680 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: timestamp) + sort order: + + Map-reduce partition columns: _col0 (type: timestamp) + Reduce Sink Vectorization: + className: VectorReduceSinkMultiKeyOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 17 Data size: 680 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: bigint) + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [] + featureSupportInUse: [] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + Reducer 2 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Operator Tree: + Group By Operator + aggregations: count(VALUE._col0) + Group By Vectorization: + className: VectorGroupByOperator + groupByMode: MERGEPARTIAL + native: false + vectorProcessingMode: MERGE_PARTIAL + keys: KEY._col0 (type: timestamp) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 8 Data size: 320 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 8 Data size: 320 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select key, count(*) from groupby_serialize_1a group by key +PREHOOK: type: QUERY +PREHOOK: Input: default@groupby_serialize_1a +#### A masked pattern was here #### +POSTHOOK: query: select key, count(*) from groupby_serialize_1a group by key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@groupby_serialize_1a +#### A masked pattern was here #### +2061-12-19 22:10:32.000628309 1 +2082-07-14 04:00:40.695380469 1 +2093-04-10 
23:36:54.846 3 +2188-06-04 15:03:14.963259704 1 +2299-11-15 16:41:30.401 1 +2306-06-21 11:02:00.143124239 2 +2608-02-23 23:44:02.546440891 1 +2686-05-23 07:46:46.565832918 2 +2898-10-01 22:27:02.000871113 1 +NULL 4 +PREHOOK: query: select key, count(*) from groupby_serialize_1a where key != '2082-07-14 04:00:40.695380469' group by key +PREHOOK: type: QUERY +PREHOOK: Input: default@groupby_serialize_1a +#### A masked pattern was here #### +POSTHOOK: query: select key, count(*) from groupby_serialize_1a where key != '2082-07-14 04:00:40.695380469' group by key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@groupby_serialize_1a +#### A masked pattern was here #### +2061-12-19 22:10:32.000628309 1 +2093-04-10 23:36:54.846 3 +2188-06-04 15:03:14.963259704 1 +2299-11-15 16:41:30.401 1 +2306-06-21 11:02:00.143124239 2 +2608-02-23 23:44:02.546440891 1 +2686-05-23 07:46:46.565832918 2 +2898-10-01 22:27:02.000871113 1 +PREHOOK: query: select key, count(key) from groupby_serialize_1a_nonull group by key +PREHOOK: type: QUERY +PREHOOK: Input: default@groupby_serialize_1a_nonull +#### A masked pattern was here #### +POSTHOOK: query: select key, count(key) from groupby_serialize_1a_nonull group by key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@groupby_serialize_1a_nonull +#### A masked pattern was here #### +2061-12-19 22:10:32.000628309 1 +2082-07-14 04:00:40.695380469 1 +2093-04-10 23:36:54.846 3 +2188-06-04 15:03:14.963259704 1 +2299-11-15 16:41:30.401 1 +2306-06-21 11:02:00.143124239 2 +2608-02-23 23:44:02.546440891 1 +2686-05-23 07:46:46.565832918 2 +2898-10-01 22:27:02.000871113 1 +PREHOOK: query: select key, count(key) from groupby_serialize_1a_nonull where key != '2082-07-14 04:00:40.695380469' group by key +PREHOOK: type: QUERY +PREHOOK: Input: default@groupby_serialize_1a_nonull +#### A masked pattern was here #### +POSTHOOK: query: select key, count(key) from groupby_serialize_1a_nonull where key != '2082-07-14 04:00:40.695380469' group by key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@groupby_serialize_1a_nonull +#### A masked pattern was here #### +2061-12-19 22:10:32.000628309 1 +2093-04-10 23:36:54.846 3 +2188-06-04 15:03:14.963259704 1 +2299-11-15 16:41:30.401 1 +2306-06-21 11:02:00.143124239 2 +2608-02-23 23:44:02.546440891 1 +2686-05-23 07:46:46.565832918 2 +2898-10-01 22:27:02.000871113 1 +PREHOOK: query: select key, count(*) from groupby_serialize_1a_nonull group by key +PREHOOK: type: QUERY +PREHOOK: Input: default@groupby_serialize_1a_nonull +#### A masked pattern was here #### +POSTHOOK: query: select key, count(*) from groupby_serialize_1a_nonull group by key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@groupby_serialize_1a_nonull +#### A masked pattern was here #### +2061-12-19 22:10:32.000628309 1 +2082-07-14 04:00:40.695380469 1 +2093-04-10 23:36:54.846 3 +2188-06-04 15:03:14.963259704 1 +2299-11-15 16:41:30.401 1 +2306-06-21 11:02:00.143124239 2 +2608-02-23 23:44:02.546440891 1 +2686-05-23 07:46:46.565832918 2 +2898-10-01 22:27:02.000871113 1 +PREHOOK: query: select key, count(*) from groupby_serialize_1a_nonull where key != '2082-07-14 04:00:40.695380469' group by key +PREHOOK: type: QUERY +PREHOOK: Input: default@groupby_serialize_1a_nonull +#### A masked pattern was here #### +POSTHOOK: query: select key, count(*) from groupby_serialize_1a_nonull where key != '2082-07-14 04:00:40.695380469' group by key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@groupby_serialize_1a_nonull +#### A masked pattern was here #### +2061-12-19 22:10:32.000628309 1 
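The groupby_serialize_1a listings above also capture the key-side NULL behavior: GROUP BY collects all NULL keys into a single group, and the count(*) output shows that group holds four rows, so the same group reports 0 under count(key) but 4 under count(*). The document's own pair of queries, annotated:

    -- count(key) skips the NULL keys inside the NULL group, hence "NULL 0" above
    select key, count(key) from groupby_serialize_1a group by key;
    -- count(*) counts the group's rows regardless of NULLs, hence "NULL 4" above
    select key, count(*) from groupby_serialize_1a group by key;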
+2093-04-10 23:36:54.846 3 +2188-06-04 15:03:14.963259704 1 +2299-11-15 16:41:30.401 1 +2306-06-21 11:02:00.143124239 2 +2608-02-23 23:44:02.546440891 1 +2686-05-23 07:46:46.565832918 2 +2898-10-01 22:27:02.000871113 1 +PREHOOK: query: explain vectorization operator +select key, count(key) from groupby_serialize_1b group by key +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization operator +select key, count(key) from groupby_serialize_1b group by key +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: groupby_serialize_1b + Statistics: Num rows: 47 Data size: 1840 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Select Operator + expressions: key (type: timestamp) + outputColumnNames: key + Select Vectorization: + className: VectorSelectOperator + native: true + Statistics: Num rows: 47 Data size: 1840 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: count(key) + Group By Vectorization: + className: VectorGroupByHashSerializeKeySingleCountKeyOperator + groupByMode: HASH + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true + vectorProcessingMode: HASH + keys: key (type: timestamp) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 47 Data size: 1840 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: timestamp) + sort order: + + Map-reduce partition columns: _col0 (type: timestamp) + Reduce Sink Vectorization: + className: VectorReduceSinkMultiKeyOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 47 Data size: 1840 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: bigint) + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [] + featureSupportInUse: [] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + Reducer 2 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Operator Tree: + Group By Operator + aggregations: count(VALUE._col0) + Group By Vectorization: + className: VectorGroupByOperator + groupByMode: MERGEPARTIAL + native: false + vectorProcessingMode: MERGE_PARTIAL + keys: KEY._col0 (type: timestamp) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 23 Data size: 900 Basic stats: COMPLETE Column 
stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 23 Data size: 900 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select key, count(key) from groupby_serialize_1b group by key +PREHOOK: type: QUERY +PREHOOK: Input: default@groupby_serialize_1b +#### A masked pattern was here #### +POSTHOOK: query: select key, count(key) from groupby_serialize_1b group by key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@groupby_serialize_1b +#### A masked pattern was here #### +1941-10-16 02:19:35.000423663 1 +1957-03-06 09:57:31 1 +1980-09-13 19:57:15 1 +2018-11-25 22:27:55.84 1 +2044-05-02 07:00:03.35 1 +2073-03-21 15:32:57.617920888 1 +2075-10-25 20:32:40.000792874 1 +2083-06-07 09:35:19.383 1 +2145-10-15 06:58:42.831 1 +2242-08-04 07:51:46.905 1 +2266-09-26 06:27:29.000284762 1 +2301-06-03 17:16:19 1 +2304-12-15 15:31:16 4 +2309-01-15 12:43:49 1 +2332-06-14 07:02:42.32 1 +2338-02-12 09:30:07 1 +2340-12-15 05:15:17.133588982 1 +2391-01-17 15:28:37.00045143 1 +2409-09-23 10:33:27 1 +2461-03-09 09:54:45.000982385 2 +2467-05-11 06:04:13.426693647 1 +2512-10-06 03:03:03 2 +2535-03-01 05:04:49.000525883 1 +2629-04-07 01:54:11 2 +2637-03-12 22:25:46.385 1 +2686-05-23 07:46:46.565832918 1 +2688-02-06 20:58:42.000947837 1 +2808-07-09 02:10:11.928498854 1 +2829-06-04 08:01:47.836 1 +2861-05-27 07:13:01.000848622 1 +2888-05-08 08:36:55.182302102 1 +2898-12-18 03:37:17 1 +2938-12-21 23:35:59.498 1 +2960-04-12 07:03:42.000366651 1 +2969-01-23 14:08:04.000667259 4 +2971-02-14 09:13:19 1 +NULL 0 +PREHOOK: query: select key, count(key) from groupby_serialize_1b where key != '2083-06-07 09:35:19.383' group by key +PREHOOK: type: QUERY +PREHOOK: Input: default@groupby_serialize_1b +#### A masked pattern was here #### +POSTHOOK: query: select key, count(key) from groupby_serialize_1b where key != '2083-06-07 09:35:19.383' group by key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@groupby_serialize_1b +#### A masked pattern was here #### +1941-10-16 02:19:35.000423663 1 +1957-03-06 09:57:31 1 +1980-09-13 19:57:15 1 +2018-11-25 22:27:55.84 1 +2044-05-02 07:00:03.35 1 +2073-03-21 15:32:57.617920888 1 +2075-10-25 20:32:40.000792874 1 +2145-10-15 06:58:42.831 1 +2242-08-04 07:51:46.905 1 +2266-09-26 06:27:29.000284762 1 +2301-06-03 17:16:19 1 +2304-12-15 15:31:16 4 +2309-01-15 12:43:49 1 +2332-06-14 07:02:42.32 1 +2338-02-12 09:30:07 1 +2340-12-15 05:15:17.133588982 1 +2391-01-17 15:28:37.00045143 1 +2409-09-23 10:33:27 1 +2461-03-09 09:54:45.000982385 2 +2467-05-11 06:04:13.426693647 1 +2512-10-06 03:03:03 2 +2535-03-01 05:04:49.000525883 1 +2629-04-07 01:54:11 2 +2637-03-12 22:25:46.385 1 +2686-05-23 07:46:46.565832918 1 +2688-02-06 20:58:42.000947837 1 +2808-07-09 02:10:11.928498854 1 +2829-06-04 08:01:47.836 1 +2861-05-27 07:13:01.000848622 1 +2888-05-08 08:36:55.182302102 1 +2898-12-18 03:37:17 1 +2938-12-21 23:35:59.498 1 +2960-04-12 07:03:42.000366651 1 +2969-01-23 14:08:04.000667259 4 +2971-02-14 09:13:19 1 +PREHOOK: query: explain vectorization operator +select key, count(*) from groupby_serialize_1b group by key +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization operator +select key, count(*) from 
groupby_serialize_1b group by key +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: groupby_serialize_1b + Statistics: Num rows: 47 Data size: 1840 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Select Operator + expressions: key (type: timestamp) + outputColumnNames: key + Select Vectorization: + className: VectorSelectOperator + native: true + Statistics: Num rows: 47 Data size: 1840 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: count() + Group By Vectorization: + className: VectorGroupByHashSerializeKeySingleCountStarOperator + groupByMode: HASH + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true + vectorProcessingMode: HASH + keys: key (type: timestamp) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 47 Data size: 1840 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: timestamp) + sort order: + + Map-reduce partition columns: _col0 (type: timestamp) + Reduce Sink Vectorization: + className: VectorReduceSinkMultiKeyOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 47 Data size: 1840 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: bigint) + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [] + featureSupportInUse: [] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + Reducer 2 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Operator Tree: + Group By Operator + aggregations: count(VALUE._col0) + Group By Vectorization: + className: VectorGroupByOperator + groupByMode: MERGEPARTIAL + native: false + vectorProcessingMode: MERGE_PARTIAL + keys: KEY._col0 (type: timestamp) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 23 Data size: 900 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 23 Data size: 900 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: 
Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select key, count(*) from groupby_serialize_1b group by key +PREHOOK: type: QUERY +PREHOOK: Input: default@groupby_serialize_1b +#### A masked pattern was here #### +POSTHOOK: query: select key, count(*) from groupby_serialize_1b group by key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@groupby_serialize_1b +#### A masked pattern was here #### +1941-10-16 02:19:35.000423663 1 +1957-03-06 09:57:31 1 +1980-09-13 19:57:15 1 +2018-11-25 22:27:55.84 1 +2044-05-02 07:00:03.35 1 +2073-03-21 15:32:57.617920888 1 +2075-10-25 20:32:40.000792874 1 +2083-06-07 09:35:19.383 1 +2145-10-15 06:58:42.831 1 +2242-08-04 07:51:46.905 1 +2266-09-26 06:27:29.000284762 1 +2301-06-03 17:16:19 1 +2304-12-15 15:31:16 4 +2309-01-15 12:43:49 1 +2332-06-14 07:02:42.32 1 +2338-02-12 09:30:07 1 +2340-12-15 05:15:17.133588982 1 +2391-01-17 15:28:37.00045143 1 +2409-09-23 10:33:27 1 +2461-03-09 09:54:45.000982385 2 +2467-05-11 06:04:13.426693647 1 +2512-10-06 03:03:03 2 +2535-03-01 05:04:49.000525883 1 +2629-04-07 01:54:11 2 +2637-03-12 22:25:46.385 1 +2686-05-23 07:46:46.565832918 1 +2688-02-06 20:58:42.000947837 1 +2808-07-09 02:10:11.928498854 1 +2829-06-04 08:01:47.836 1 +2861-05-27 07:13:01.000848622 1 +2888-05-08 08:36:55.182302102 1 +2898-12-18 03:37:17 1 +2938-12-21 23:35:59.498 1 +2960-04-12 07:03:42.000366651 1 +2969-01-23 14:08:04.000667259 4 +2971-02-14 09:13:19 1 +NULL 2 +PREHOOK: query: select key, count(*) from groupby_serialize_1b where key != '2083-06-07 09:35:19.383' group by key +PREHOOK: type: QUERY +PREHOOK: Input: default@groupby_serialize_1b +#### A masked pattern was here #### +POSTHOOK: query: select key, count(*) from groupby_serialize_1b where key != '2083-06-07 09:35:19.383' group by key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@groupby_serialize_1b +#### A masked pattern was here #### +1941-10-16 02:19:35.000423663 1 +1957-03-06 09:57:31 1 +1980-09-13 19:57:15 1 +2018-11-25 22:27:55.84 1 +2044-05-02 07:00:03.35 1 +2073-03-21 15:32:57.617920888 1 +2075-10-25 20:32:40.000792874 1 +2145-10-15 06:58:42.831 1 +2242-08-04 07:51:46.905 1 +2266-09-26 06:27:29.000284762 1 +2301-06-03 17:16:19 1 +2304-12-15 15:31:16 4 +2309-01-15 12:43:49 1 +2332-06-14 07:02:42.32 1 +2338-02-12 09:30:07 1 +2340-12-15 05:15:17.133588982 1 +2391-01-17 15:28:37.00045143 1 +2409-09-23 10:33:27 1 +2461-03-09 09:54:45.000982385 2 +2467-05-11 06:04:13.426693647 1 +2512-10-06 03:03:03 2 +2535-03-01 05:04:49.000525883 1 +2629-04-07 01:54:11 2 +2637-03-12 22:25:46.385 1 +2686-05-23 07:46:46.565832918 1 +2688-02-06 20:58:42.000947837 1 +2808-07-09 02:10:11.928498854 1 +2829-06-04 08:01:47.836 1 +2861-05-27 07:13:01.000848622 1 +2888-05-08 08:36:55.182302102 1 +2898-12-18 03:37:17 1 +2938-12-21 23:35:59.498 1 +2960-04-12 07:03:42.000366651 1 +2969-01-23 14:08:04.000667259 4 +2971-02-14 09:13:19 1 +PREHOOK: query: explain vectorization operator +select key, count(c_smallint) from groupby_serialize_1b group by key +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization operator +select key, count(c_smallint) from groupby_serialize_1b group by key +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + 
Map Operator Tree: + TableScan + alias: groupby_serialize_1b + Statistics: Num rows: 47 Data size: 2024 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Select Operator + expressions: key (type: timestamp), c_smallint (type: smallint) + outputColumnNames: key, c_smallint + Select Vectorization: + className: VectorSelectOperator + native: true + Statistics: Num rows: 47 Data size: 2024 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: count(c_smallint) + Group By Vectorization: + className: VectorGroupByHashSerializeKeySingleCountColumnOperator + groupByMode: HASH + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true + vectorProcessingMode: HASH + keys: key (type: timestamp) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 47 Data size: 2024 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: timestamp) + sort order: + + Map-reduce partition columns: _col0 (type: timestamp) + Reduce Sink Vectorization: + className: VectorReduceSinkMultiKeyOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 47 Data size: 2024 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: bigint) + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [] + featureSupportInUse: [] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + Reducer 2 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Operator Tree: + Group By Operator + aggregations: count(VALUE._col0) + Group By Vectorization: + className: VectorGroupByOperator + groupByMode: MERGEPARTIAL + native: false + vectorProcessingMode: MERGE_PARTIAL + keys: KEY._col0 (type: timestamp) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 23 Data size: 990 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 23 Data size: 990 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select key, count(c_smallint) from groupby_serialize_1b group by key +PREHOOK: type: QUERY +PREHOOK: Input: default@groupby_serialize_1b +#### A masked pattern was here #### +POSTHOOK: query: select key, count(c_smallint) from groupby_serialize_1b group by key +POSTHOOK: type: QUERY 
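Every nativeConditionsMet line in these plans is gated on the new flag, so a q file can flip between the native and generic operators to compare plans. A sketch, assuming the usual set-command syntax:

    -- with the flag on (the default), a single-COUNT hash GROUP BY compiles to one of
    -- the native classes seen above, e.g. VectorGroupByHashSerializeKeySingleCountColumnOperator
    set hive.vectorized.execution.groupby.native.enabled=true;
    explain vectorization operator
    select key, count(c_smallint) from groupby_serialize_1b group by key;

    -- with the flag off, the same query should fall back to the generic
    -- VectorGroupByOperator (native: false), as in the reducer stages above
    set hive.vectorized.execution.groupby.native.enabled=false;
    explain vectorization operator
    select key, count(c_smallint) from groupby_serialize_1b group by key;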
+POSTHOOK: Input: default@groupby_serialize_1b +#### A masked pattern was here #### +1941-10-16 02:19:35.000423663 1 +1957-03-06 09:57:31 1 +1980-09-13 19:57:15 0 +2018-11-25 22:27:55.84 1 +2044-05-02 07:00:03.35 1 +2073-03-21 15:32:57.617920888 1 +2075-10-25 20:32:40.000792874 0 +2083-06-07 09:35:19.383 1 +2145-10-15 06:58:42.831 1 +2242-08-04 07:51:46.905 1 +2266-09-26 06:27:29.000284762 1 +2301-06-03 17:16:19 1 +2304-12-15 15:31:16 4 +2309-01-15 12:43:49 1 +2332-06-14 07:02:42.32 1 +2338-02-12 09:30:07 1 +2340-12-15 05:15:17.133588982 1 +2391-01-17 15:28:37.00045143 1 +2409-09-23 10:33:27 1 +2461-03-09 09:54:45.000982385 2 +2467-05-11 06:04:13.426693647 1 +2512-10-06 03:03:03 2 +2535-03-01 05:04:49.000525883 1 +2629-04-07 01:54:11 2 +2637-03-12 22:25:46.385 1 +2686-05-23 07:46:46.565832918 1 +2688-02-06 20:58:42.000947837 1 +2808-07-09 02:10:11.928498854 1 +2829-06-04 08:01:47.836 1 +2861-05-27 07:13:01.000848622 1 +2888-05-08 08:36:55.182302102 1 +2898-12-18 03:37:17 1 +2938-12-21 23:35:59.498 1 +2960-04-12 07:03:42.000366651 1 +2969-01-23 14:08:04.000667259 4 +2971-02-14 09:13:19 1 +NULL 0 +PREHOOK: query: select key, count(c_smallint) from groupby_serialize_1b where key != '2083-06-07 09:35:19.383' group by key +PREHOOK: type: QUERY +PREHOOK: Input: default@groupby_serialize_1b +#### A masked pattern was here #### +POSTHOOK: query: select key, count(c_smallint) from groupby_serialize_1b where key != '2083-06-07 09:35:19.383' group by key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@groupby_serialize_1b +#### A masked pattern was here #### +1941-10-16 02:19:35.000423663 1 +1957-03-06 09:57:31 1 +1980-09-13 19:57:15 0 +2018-11-25 22:27:55.84 1 +2044-05-02 07:00:03.35 1 +2073-03-21 15:32:57.617920888 1 +2075-10-25 20:32:40.000792874 0 +2145-10-15 06:58:42.831 1 +2242-08-04 07:51:46.905 1 +2266-09-26 06:27:29.000284762 1 +2301-06-03 17:16:19 1 +2304-12-15 15:31:16 4 +2309-01-15 12:43:49 1 +2332-06-14 07:02:42.32 1 +2338-02-12 09:30:07 1 +2340-12-15 05:15:17.133588982 1 +2391-01-17 15:28:37.00045143 1 +2409-09-23 10:33:27 1 +2461-03-09 09:54:45.000982385 2 +2467-05-11 06:04:13.426693647 1 +2512-10-06 03:03:03 2 +2535-03-01 05:04:49.000525883 1 +2629-04-07 01:54:11 2 +2637-03-12 22:25:46.385 1 +2686-05-23 07:46:46.565832918 1 +2688-02-06 20:58:42.000947837 1 +2808-07-09 02:10:11.928498854 1 +2829-06-04 08:01:47.836 1 +2861-05-27 07:13:01.000848622 1 +2888-05-08 08:36:55.182302102 1 +2898-12-18 03:37:17 1 +2938-12-21 23:35:59.498 1 +2960-04-12 07:03:42.000366651 1 +2969-01-23 14:08:04.000667259 4 +2971-02-14 09:13:19 1 +PREHOOK: query: explain vectorization operator +select key, count(c_string) from groupby_serialize_1b group by key +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization operator +select key, count(c_string) from groupby_serialize_1b group by key +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: groupby_serialize_1b + Statistics: Num rows: 47 Data size: 10304 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Select Operator + expressions: key (type: timestamp), c_string (type: string) + outputColumnNames: key, c_string + Select Vectorization: + 
className: VectorSelectOperator + native: true + Statistics: Num rows: 47 Data size: 10304 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: count(c_string) + Group By Vectorization: + className: VectorGroupByHashSerializeKeySingleCountColumnOperator + groupByMode: HASH + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true + vectorProcessingMode: HASH + keys: key (type: timestamp) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 47 Data size: 10304 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: timestamp) + sort order: + + Map-reduce partition columns: _col0 (type: timestamp) + Reduce Sink Vectorization: + className: VectorReduceSinkMultiKeyOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 47 Data size: 10304 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: bigint) + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [] + featureSupportInUse: [] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + Reducer 2 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Operator Tree: + Group By Operator + aggregations: count(VALUE._col0) + Group By Vectorization: + className: VectorGroupByOperator + groupByMode: MERGEPARTIAL + native: false + vectorProcessingMode: MERGE_PARTIAL + keys: KEY._col0 (type: timestamp) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 23 Data size: 5042 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 23 Data size: 5042 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select key, count(c_string) from groupby_serialize_1b group by key +PREHOOK: type: QUERY +PREHOOK: Input: default@groupby_serialize_1b +#### A masked pattern was here #### +POSTHOOK: query: select key, count(c_string) from groupby_serialize_1b group by key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@groupby_serialize_1b +#### A masked pattern was here #### +1941-10-16 02:19:35.000423663 1 +1957-03-06 09:57:31 1 +1980-09-13 19:57:15 1 +2018-11-25 22:27:55.84 1 +2044-05-02 07:00:03.35 1 +2073-03-21 15:32:57.617920888 1 +2075-10-25 20:32:40.000792874 0 +2083-06-07 09:35:19.383 1 +2145-10-15 06:58:42.831 0 
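The filtered variant of this listing (further below) additionally pins down predicate behavior on NULL keys: key != '...' evaluates to NULL rather than TRUE for a NULL key, so the NULL group shown as "NULL 0" in the unfiltered output is absent once the WHERE clause is added. The document's own query, annotated:

    -- the string literal is implicitly cast to timestamp for the comparison;
    -- NULL != <ts> yields NULL, and WHERE keeps only TRUE rows, so the
    -- NULL-key group does not appear in the filtered result
    select key, count(c_string) from groupby_serialize_1b
    where key != '2083-06-07 09:35:19.383'
    group by key;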
+2242-08-04 07:51:46.905 1 +2266-09-26 06:27:29.000284762 1 +2301-06-03 17:16:19 1 +2304-12-15 15:31:16 4 +2309-01-15 12:43:49 1 +2332-06-14 07:02:42.32 1 +2338-02-12 09:30:07 1 +2340-12-15 05:15:17.133588982 1 +2391-01-17 15:28:37.00045143 1 +2409-09-23 10:33:27 1 +2461-03-09 09:54:45.000982385 2 +2467-05-11 06:04:13.426693647 1 +2512-10-06 03:03:03 2 +2535-03-01 05:04:49.000525883 1 +2629-04-07 01:54:11 2 +2637-03-12 22:25:46.385 1 +2686-05-23 07:46:46.565832918 1 +2688-02-06 20:58:42.000947837 1 +2808-07-09 02:10:11.928498854 1 +2829-06-04 08:01:47.836 1 +2861-05-27 07:13:01.000848622 1 +2888-05-08 08:36:55.182302102 1 +2898-12-18 03:37:17 1 +2938-12-21 23:35:59.498 1 +2960-04-12 07:03:42.000366651 1 +2969-01-23 14:08:04.000667259 4 +2971-02-14 09:13:19 1 +NULL 0 +PREHOOK: query: select key, count(c_string) from groupby_serialize_1b where key != '2083-06-07 09:35:19.383' group by key +PREHOOK: type: QUERY +PREHOOK: Input: default@groupby_serialize_1b +#### A masked pattern was here #### +POSTHOOK: query: select key, count(c_string) from groupby_serialize_1b where key != '2083-06-07 09:35:19.383' group by key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@groupby_serialize_1b +#### A masked pattern was here #### +1941-10-16 02:19:35.000423663 1 +1957-03-06 09:57:31 1 +1980-09-13 19:57:15 1 +2018-11-25 22:27:55.84 1 +2044-05-02 07:00:03.35 1 +2073-03-21 15:32:57.617920888 1 +2075-10-25 20:32:40.000792874 0 +2145-10-15 06:58:42.831 0 +2242-08-04 07:51:46.905 1 +2266-09-26 06:27:29.000284762 1 +2301-06-03 17:16:19 1 +2304-12-15 15:31:16 4 +2309-01-15 12:43:49 1 +2332-06-14 07:02:42.32 1 +2338-02-12 09:30:07 1 +2340-12-15 05:15:17.133588982 1 +2391-01-17 15:28:37.00045143 1 +2409-09-23 10:33:27 1 +2461-03-09 09:54:45.000982385 2 +2467-05-11 06:04:13.426693647 1 +2512-10-06 03:03:03 2 +2535-03-01 05:04:49.000525883 1 +2629-04-07 01:54:11 2 +2637-03-12 22:25:46.385 1 +2686-05-23 07:46:46.565832918 1 +2688-02-06 20:58:42.000947837 1 +2808-07-09 02:10:11.928498854 1 +2829-06-04 08:01:47.836 1 +2861-05-27 07:13:01.000848622 1 +2888-05-08 08:36:55.182302102 1 +2898-12-18 03:37:17 1 +2938-12-21 23:35:59.498 1 +2960-04-12 07:03:42.000366651 1 +2969-01-23 14:08:04.000667259 4 +2971-02-14 09:13:19 1 +PREHOOK: query: select key, count(key) from groupby_serialize_1b_nonull group by key +PREHOOK: type: QUERY +PREHOOK: Input: default@groupby_serialize_1b_nonull +#### A masked pattern was here #### +POSTHOOK: query: select key, count(key) from groupby_serialize_1b_nonull group by key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@groupby_serialize_1b_nonull +#### A masked pattern was here #### +1931-12-04 11:13:47.269597392 1 +1941-10-16 02:19:35.000423663 1 +1957-03-06 09:57:31 1 +1980-09-13 19:57:15 1 +2018-11-25 22:27:55.84 1 +2044-05-02 07:00:03.35 1 +2073-03-21 15:32:57.617920888 1 +2075-10-25 20:32:40.000792874 1 +2083-06-07 09:35:19.383 1 +2105-01-04 16:27:45 1 +2145-10-15 06:58:42.831 2 +2188-06-04 15:03:14.963259704 1 +2242-08-04 07:51:46.905 2 +2266-09-26 06:27:29.000284762 1 +2301-06-03 17:16:19 1 +2304-12-15 15:31:16 7 +2309-01-15 12:43:49 1 +2332-06-14 07:02:42.32 1 +2333-07-28 09:59:26 1 +2338-02-12 09:30:07 1 +2340-12-15 05:15:17.133588982 1 +2357-05-08 07:09:09.000482799 1 +2391-01-17 15:28:37.00045143 1 +2396-04-06 15:39:02.404013577 2 +2409-09-23 10:33:27 3 +2461-03-09 09:54:45.000982385 2 +2462-12-16 23:11:32.633305644 1 +2467-05-11 06:04:13.426693647 1 +2512-10-06 03:03:03 4 +2535-03-01 05:04:49.000525883 1 +2629-04-07 01:54:11 2 +2637-03-12 22:25:46.385 2 +2686-05-23 
07:46:46.565832918 1 +2688-02-06 20:58:42.000947837 1 +2808-07-09 02:10:11.928498854 1 +2829-06-04 08:01:47.836 1 +2861-05-27 07:13:01.000848622 1 +2888-05-08 08:36:55.182302102 2 +2897-08-10 15:21:47.09 1 +2898-12-18 03:37:17 1 +2938-12-21 23:35:59.498 1 +2960-04-12 07:03:42.000366651 1 +2969-01-23 14:08:04.000667259 5 +2971-02-14 09:13:19 1 +PREHOOK: query: select key, count(key) from groupby_serialize_1b_nonull where key != '2083-06-07 09:35:19.383' group by key +PREHOOK: type: QUERY +PREHOOK: Input: default@groupby_serialize_1b_nonull +#### A masked pattern was here #### +POSTHOOK: query: select key, count(key) from groupby_serialize_1b_nonull where key != '2083-06-07 09:35:19.383' group by key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@groupby_serialize_1b_nonull +#### A masked pattern was here #### +1931-12-04 11:13:47.269597392 1 +1941-10-16 02:19:35.000423663 1 +1957-03-06 09:57:31 1 +1980-09-13 19:57:15 1 +2018-11-25 22:27:55.84 1 +2044-05-02 07:00:03.35 1 +2073-03-21 15:32:57.617920888 1 +2075-10-25 20:32:40.000792874 1 +2105-01-04 16:27:45 1 +2145-10-15 06:58:42.831 2 +2188-06-04 15:03:14.963259704 1 +2242-08-04 07:51:46.905 2 +2266-09-26 06:27:29.000284762 1 +2301-06-03 17:16:19 1 +2304-12-15 15:31:16 7 +2309-01-15 12:43:49 1 +2332-06-14 07:02:42.32 1 +2333-07-28 09:59:26 1 +2338-02-12 09:30:07 1 +2340-12-15 05:15:17.133588982 1 +2357-05-08 07:09:09.000482799 1 +2391-01-17 15:28:37.00045143 1 +2396-04-06 15:39:02.404013577 2 +2409-09-23 10:33:27 3 +2461-03-09 09:54:45.000982385 2 +2462-12-16 23:11:32.633305644 1 +2467-05-11 06:04:13.426693647 1 +2512-10-06 03:03:03 4 +2535-03-01 05:04:49.000525883 1 +2629-04-07 01:54:11 2 +2637-03-12 22:25:46.385 2 +2686-05-23 07:46:46.565832918 1 +2688-02-06 20:58:42.000947837 1 +2808-07-09 02:10:11.928498854 1 +2829-06-04 08:01:47.836 1 +2861-05-27 07:13:01.000848622 1 +2888-05-08 08:36:55.182302102 2 +2897-08-10 15:21:47.09 1 +2898-12-18 03:37:17 1 +2938-12-21 23:35:59.498 1 +2960-04-12 07:03:42.000366651 1 +2969-01-23 14:08:04.000667259 5 +2971-02-14 09:13:19 1 +PREHOOK: query: select key, count(*) from groupby_serialize_1b_nonull group by key +PREHOOK: type: QUERY +PREHOOK: Input: default@groupby_serialize_1b_nonull +#### A masked pattern was here #### +POSTHOOK: query: select key, count(*) from groupby_serialize_1b_nonull group by key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@groupby_serialize_1b_nonull +#### A masked pattern was here #### +1931-12-04 11:13:47.269597392 1 +1941-10-16 02:19:35.000423663 1 +1957-03-06 09:57:31 1 +1980-09-13 19:57:15 1 +2018-11-25 22:27:55.84 1 +2044-05-02 07:00:03.35 1 +2073-03-21 15:32:57.617920888 1 +2075-10-25 20:32:40.000792874 1 +2083-06-07 09:35:19.383 1 +2105-01-04 16:27:45 1 +2145-10-15 06:58:42.831 2 +2188-06-04 15:03:14.963259704 1 +2242-08-04 07:51:46.905 2 +2266-09-26 06:27:29.000284762 1 +2301-06-03 17:16:19 1 +2304-12-15 15:31:16 7 +2309-01-15 12:43:49 1 +2332-06-14 07:02:42.32 1 +2333-07-28 09:59:26 1 +2338-02-12 09:30:07 1 +2340-12-15 05:15:17.133588982 1 +2357-05-08 07:09:09.000482799 1 +2391-01-17 15:28:37.00045143 1 +2396-04-06 15:39:02.404013577 2 +2409-09-23 10:33:27 3 +2461-03-09 09:54:45.000982385 2 +2462-12-16 23:11:32.633305644 1 +2467-05-11 06:04:13.426693647 1 +2512-10-06 03:03:03 4 +2535-03-01 05:04:49.000525883 1 +2629-04-07 01:54:11 2 +2637-03-12 22:25:46.385 2 +2686-05-23 07:46:46.565832918 1 +2688-02-06 20:58:42.000947837 1 +2808-07-09 02:10:11.928498854 1 +2829-06-04 08:01:47.836 1 +2861-05-27 07:13:01.000848622 1 +2888-05-08 08:36:55.182302102 2 +2897-08-10 
15:21:47.09 1 +2898-12-18 03:37:17 1 +2938-12-21 23:35:59.498 1 +2960-04-12 07:03:42.000366651 1 +2969-01-23 14:08:04.000667259 5 +2971-02-14 09:13:19 1 +PREHOOK: query: select key, count(*) from groupby_serialize_1b_nonull where key != '2083-06-07 09:35:19.383' group by key +PREHOOK: type: QUERY +PREHOOK: Input: default@groupby_serialize_1b_nonull +#### A masked pattern was here #### +POSTHOOK: query: select key, count(*) from groupby_serialize_1b_nonull where key != '2083-06-07 09:35:19.383' group by key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@groupby_serialize_1b_nonull +#### A masked pattern was here #### +1931-12-04 11:13:47.269597392 1 +1941-10-16 02:19:35.000423663 1 +1957-03-06 09:57:31 1 +1980-09-13 19:57:15 1 +2018-11-25 22:27:55.84 1 +2044-05-02 07:00:03.35 1 +2073-03-21 15:32:57.617920888 1 +2075-10-25 20:32:40.000792874 1 +2105-01-04 16:27:45 1 +2145-10-15 06:58:42.831 2 +2188-06-04 15:03:14.963259704 1 +2242-08-04 07:51:46.905 2 +2266-09-26 06:27:29.000284762 1 +2301-06-03 17:16:19 1 +2304-12-15 15:31:16 7 +2309-01-15 12:43:49 1 +2332-06-14 07:02:42.32 1 +2333-07-28 09:59:26 1 +2338-02-12 09:30:07 1 +2340-12-15 05:15:17.133588982 1 +2357-05-08 07:09:09.000482799 1 +2391-01-17 15:28:37.00045143 1 +2396-04-06 15:39:02.404013577 2 +2409-09-23 10:33:27 3 +2461-03-09 09:54:45.000982385 2 +2462-12-16 23:11:32.633305644 1 +2467-05-11 06:04:13.426693647 1 +2512-10-06 03:03:03 4 +2535-03-01 05:04:49.000525883 1 +2629-04-07 01:54:11 2 +2637-03-12 22:25:46.385 2 +2686-05-23 07:46:46.565832918 1 +2688-02-06 20:58:42.000947837 1 +2808-07-09 02:10:11.928498854 1 +2829-06-04 08:01:47.836 1 +2861-05-27 07:13:01.000848622 1 +2888-05-08 08:36:55.182302102 2 +2897-08-10 15:21:47.09 1 +2898-12-18 03:37:17 1 +2938-12-21 23:35:59.498 1 +2960-04-12 07:03:42.000366651 1 +2969-01-23 14:08:04.000667259 5 +2971-02-14 09:13:19 1 +PREHOOK: query: select key, count(c_smallint) from groupby_serialize_1b_nonull group by key +PREHOOK: type: QUERY +PREHOOK: Input: default@groupby_serialize_1b_nonull +#### A masked pattern was here #### +POSTHOOK: query: select key, count(c_smallint) from groupby_serialize_1b_nonull group by key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@groupby_serialize_1b_nonull +#### A masked pattern was here #### +1931-12-04 11:13:47.269597392 1 +1941-10-16 02:19:35.000423663 1 +1957-03-06 09:57:31 1 +1980-09-13 19:57:15 0 +2018-11-25 22:27:55.84 1 +2044-05-02 07:00:03.35 1 +2073-03-21 15:32:57.617920888 1 +2075-10-25 20:32:40.000792874 0 +2083-06-07 09:35:19.383 1 +2105-01-04 16:27:45 1 +2145-10-15 06:58:42.831 2 +2188-06-04 15:03:14.963259704 1 +2242-08-04 07:51:46.905 2 +2266-09-26 06:27:29.000284762 1 +2301-06-03 17:16:19 1 +2304-12-15 15:31:16 7 +2309-01-15 12:43:49 1 +2332-06-14 07:02:42.32 1 +2333-07-28 09:59:26 1 +2338-02-12 09:30:07 1 +2340-12-15 05:15:17.133588982 1 +2357-05-08 07:09:09.000482799 1 +2391-01-17 15:28:37.00045143 1 +2396-04-06 15:39:02.404013577 2 +2409-09-23 10:33:27 3 +2461-03-09 09:54:45.000982385 2 +2462-12-16 23:11:32.633305644 1 +2467-05-11 06:04:13.426693647 1 +2512-10-06 03:03:03 4 +2535-03-01 05:04:49.000525883 1 +2629-04-07 01:54:11 2 +2637-03-12 22:25:46.385 2 +2686-05-23 07:46:46.565832918 1 +2688-02-06 20:58:42.000947837 1 +2808-07-09 02:10:11.928498854 1 +2829-06-04 08:01:47.836 1 +2861-05-27 07:13:01.000848622 1 +2888-05-08 08:36:55.182302102 2 +2897-08-10 15:21:47.09 1 +2898-12-18 03:37:17 1 +2938-12-21 23:35:59.498 1 +2960-04-12 07:03:42.000366651 1 +2969-01-23 14:08:04.000667259 5 +2971-02-14 09:13:19 1 +PREHOOK: query: select key, 
count(c_smallint) from groupby_serialize_1b_nonull where key != '2083-06-07 09:35:19.383' group by key +PREHOOK: type: QUERY +PREHOOK: Input: default@groupby_serialize_1b_nonull +#### A masked pattern was here #### +POSTHOOK: query: select key, count(c_smallint) from groupby_serialize_1b_nonull where key != '2083-06-07 09:35:19.383' group by key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@groupby_serialize_1b_nonull +#### A masked pattern was here #### +1931-12-04 11:13:47.269597392 1 +1941-10-16 02:19:35.000423663 1 +1957-03-06 09:57:31 1 +1980-09-13 19:57:15 0 +2018-11-25 22:27:55.84 1 +2044-05-02 07:00:03.35 1 +2073-03-21 15:32:57.617920888 1 +2075-10-25 20:32:40.000792874 0 +2105-01-04 16:27:45 1 +2145-10-15 06:58:42.831 2 +2188-06-04 15:03:14.963259704 1 +2242-08-04 07:51:46.905 2 +2266-09-26 06:27:29.000284762 1 +2301-06-03 17:16:19 1 +2304-12-15 15:31:16 7 +2309-01-15 12:43:49 1 +2332-06-14 07:02:42.32 1 +2333-07-28 09:59:26 1 +2338-02-12 09:30:07 1 +2340-12-15 05:15:17.133588982 1 +2357-05-08 07:09:09.000482799 1 +2391-01-17 15:28:37.00045143 1 +2396-04-06 15:39:02.404013577 2 +2409-09-23 10:33:27 3 +2461-03-09 09:54:45.000982385 2 +2462-12-16 23:11:32.633305644 1 +2467-05-11 06:04:13.426693647 1 +2512-10-06 03:03:03 4 +2535-03-01 05:04:49.000525883 1 +2629-04-07 01:54:11 2 +2637-03-12 22:25:46.385 2 +2686-05-23 07:46:46.565832918 1 +2688-02-06 20:58:42.000947837 1 +2808-07-09 02:10:11.928498854 1 +2829-06-04 08:01:47.836 1 +2861-05-27 07:13:01.000848622 1 +2888-05-08 08:36:55.182302102 2 +2897-08-10 15:21:47.09 1 +2898-12-18 03:37:17 1 +2938-12-21 23:35:59.498 1 +2960-04-12 07:03:42.000366651 1 +2969-01-23 14:08:04.000667259 5 +2971-02-14 09:13:19 1 +PREHOOK: query: select key, count(c_string) from groupby_serialize_1b_nonull group by key +PREHOOK: type: QUERY +PREHOOK: Input: default@groupby_serialize_1b_nonull +#### A masked pattern was here #### +POSTHOOK: query: select key, count(c_string) from groupby_serialize_1b_nonull group by key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@groupby_serialize_1b_nonull +#### A masked pattern was here #### +1931-12-04 11:13:47.269597392 1 +1941-10-16 02:19:35.000423663 1 +1957-03-06 09:57:31 1 +1980-09-13 19:57:15 1 +2018-11-25 22:27:55.84 1 +2044-05-02 07:00:03.35 1 +2073-03-21 15:32:57.617920888 1 +2075-10-25 20:32:40.000792874 0 +2083-06-07 09:35:19.383 1 +2105-01-04 16:27:45 1 +2145-10-15 06:58:42.831 1 +2188-06-04 15:03:14.963259704 1 +2242-08-04 07:51:46.905 2 +2266-09-26 06:27:29.000284762 1 +2301-06-03 17:16:19 1 +2304-12-15 15:31:16 7 +2309-01-15 12:43:49 1 +2332-06-14 07:02:42.32 1 +2333-07-28 09:59:26 1 +2338-02-12 09:30:07 1 +2340-12-15 05:15:17.133588982 1 +2357-05-08 07:09:09.000482799 1 +2391-01-17 15:28:37.00045143 1 +2396-04-06 15:39:02.404013577 2 +2409-09-23 10:33:27 3 +2461-03-09 09:54:45.000982385 2 +2462-12-16 23:11:32.633305644 1 +2467-05-11 06:04:13.426693647 1 +2512-10-06 03:03:03 4 +2535-03-01 05:04:49.000525883 1 +2629-04-07 01:54:11 2 +2637-03-12 22:25:46.385 2 +2686-05-23 07:46:46.565832918 1 +2688-02-06 20:58:42.000947837 1 +2808-07-09 02:10:11.928498854 1 +2829-06-04 08:01:47.836 1 +2861-05-27 07:13:01.000848622 1 +2888-05-08 08:36:55.182302102 2 +2897-08-10 15:21:47.09 1 +2898-12-18 03:37:17 1 +2938-12-21 23:35:59.498 1 +2960-04-12 07:03:42.000366651 1 +2969-01-23 14:08:04.000667259 5 +2971-02-14 09:13:19 1 +PREHOOK: query: select key, count(c_string) from groupby_serialize_1b_nonull where key != '22083-06-07 09:35:19.383' group by key +PREHOOK: type: QUERY +PREHOOK: Input: 
+PREHOOK: query: CREATE TABLE over10k(t tinyint, + si smallint, + i int, + b bigint, + f float, + d double, + bo boolean, + s string, + ts timestamp, + `dec` decimal(4,2), + bin binary) +ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' +STORED AS TEXTFILE +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@over10k +POSTHOOK: query: CREATE TABLE over10k(t tinyint, + si smallint, + i int, + b bigint, + f float, + d double, + bo boolean, + s string, + ts timestamp, + `dec` decimal(4,2), + bin binary) +ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' +STORED AS TEXTFILE +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@over10k +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/over10k' OVERWRITE INTO TABLE over10k +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@over10k +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/over10k' OVERWRITE INTO TABLE over10k +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@over10k +PREHOOK: query: explain vectorization operator +select s, count(s) from over10k group by s order by s limit 10 +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization operator +select s, count(s) from over10k group by s order by s limit 10 +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) + Reducer 3 <- Reducer 2 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: over10k + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Select Operator + expressions: s (type: string) + outputColumnNames: s + Select Vectorization: + className: VectorSelectOperator + native: true + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: count(s) + Group By Vectorization: + className: VectorGroupByHashStringKeySingleCountKeyOperator + groupByMode: HASH + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true + vectorProcessingMode: HASH + keys: s (type: string) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Reduce Sink Vectorization: + className: VectorReduceSinkStringOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true,
LazyBinarySerDe for values IS true + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE + TopN Hash Memory Usage: 0.1 + value expressions: _col1 (type: bigint) + Execution mode: vectorized, llap + LLAP IO: no inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] + featureSupportInUse: [] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + Reducer 2 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Operator Tree: + Group By Operator + aggregations: count(VALUE._col0) + Group By Vectorization: + className: VectorGroupByOperator + groupByMode: MERGEPARTIAL + native: false + vectorProcessingMode: MERGE_PARTIAL + keys: KEY._col0 (type: string) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Reduce Sink Vectorization: + className: VectorReduceSinkObjectHashOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE + TopN Hash Memory Usage: 0.1 + value expressions: _col1 (type: bigint) + Reducer 3 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: bigint) + outputColumnNames: _col0, _col1 + Select Vectorization: + className: VectorSelectOperator + native: true + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE + Limit + Number of rows: 10 + Limit Vectorization: + className: VectorLimitOperator + native: true + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: 10 + Processor Tree: + ListSink + +PREHOOK: query: select s, count(s) from over10k group by s order by s limit 10 +PREHOOK: type: QUERY +PREHOOK: Input: default@over10k +#### A masked pattern was here #### +POSTHOOK: query: select s, count(s) from over10k group by s order by s limit 10 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@over10k +#### A masked pattern was here #### +alice allen 8 +alice brown 14 +alice carson 10 +alice 
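davidson 18 +alice ellison 15 +alice falkner 17 +alice garcia 13 +alice hernandez 18 +alice ichabod 22 +alice johnson 12

This is the first plan to take the new native path: with a single string key and a single COUNT of the key itself, the map-side hash GroupBy becomes VectorGroupByHashStringKeySingleCountKeyOperator, and the nativeConditionsMet list spells out exactly when that happens. A minimal sketch, not part of the generated q.out, of how one might confirm the flag gates this choice, assuming a Tez/LLAP session like the one above:

-- With the flag off, the map-side GroupBy should presumably fall back
-- to the generic VectorGroupByOperator instead of the native operator.
set hive.vectorized.execution.groupby.native.enabled=false;
explain vectorization operator
select s, count(s) from over10k group by s order by s limit 10;
-- Restore the default.
set hive.vectorized.execution.groupby.native.enabled=true;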
+PREHOOK: query: explain vectorization operator +select s, count(ts) from over10k group by s order by s limit 10 +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization operator +select s, count(ts) from over10k group by s order by s limit 10 +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) + Reducer 3 <- Reducer 2 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: over10k + Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Select Operator + expressions: s (type: string), ts (type: timestamp) + outputColumnNames: s, ts + Select Vectorization: + className: VectorSelectOperator + native: true + Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: count(ts) + Group By Vectorization: + className: VectorGroupByHashStringKeySingleCountColumnOperator + groupByMode: HASH + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true + vectorProcessingMode: HASH + keys: s (type: string) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Reduce Sink Vectorization: + className: VectorReduceSinkStringOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: NONE + TopN Hash Memory Usage: 0.1 + value expressions: _col1 (type: bigint) + Execution mode: vectorized, llap + LLAP IO: no inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] + featureSupportInUse: [] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + Reducer 2 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Operator Tree: + Group By Operator + aggregations: count(VALUE._col0) + Group By Vectorization: + className: VectorGroupByOperator + groupByMode: MERGEPARTIAL + native: false + vectorProcessingMode: MERGE_PARTIAL + keys: KEY._col0 (type: string) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data
size: 224 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Reduce Sink Vectorization: + className: VectorReduceSinkObjectHashOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: NONE + TopN Hash Memory Usage: 0.1 + value expressions: _col1 (type: bigint) + Reducer 3 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: bigint) + outputColumnNames: _col0, _col1 + Select Vectorization: + className: VectorSelectOperator + native: true + Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: NONE + Limit + Number of rows: 10 + Limit Vectorization: + className: VectorLimitOperator + native: true + Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: 10 + Processor Tree: + ListSink + +PREHOOK: query: select s, count(ts) from over10k group by s order by s limit 10 +PREHOOK: type: QUERY +PREHOOK: Input: default@over10k +#### A masked pattern was here #### +POSTHOOK: query: select s, count(ts) from over10k group by s order by s limit 10 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@over10k +#### A masked pattern was here #### +alice allen 8 +alice brown 14 +alice carson 10 +alice davidson 18 +alice ellison 15 +alice falkner 17 +alice garcia 13 +alice hernandez 18 +alice ichabod 22 +alice johnson 12 +PREHOOK: query: explain vectorization operator +select s, count(*) from over10k group by s order by s limit 10 +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization operator +select s, count(*) from over10k group by s order by s limit 10 +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) + Reducer 3 <- Reducer 2 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: over10k + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Select Operator + expressions: s (type: string) + outputColumnNames: s + Select Vectorization: + className: VectorSelectOperator + native: true + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: count() + Group By 
Vectorization: + className: VectorGroupByHashStringKeySingleCountStarOperator + groupByMode: HASH + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true + vectorProcessingMode: HASH + keys: s (type: string) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Reduce Sink Vectorization: + className: VectorReduceSinkStringOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE + TopN Hash Memory Usage: 0.1 + value expressions: _col1 (type: bigint) + Execution mode: vectorized, llap + LLAP IO: no inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] + featureSupportInUse: [] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + Reducer 2 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Operator Tree: + Group By Operator + aggregations: count(VALUE._col0) + Group By Vectorization: + className: VectorGroupByOperator + groupByMode: MERGEPARTIAL + native: false + vectorProcessingMode: MERGE_PARTIAL + keys: KEY._col0 (type: string) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Reduce Sink Vectorization: + className: VectorReduceSinkObjectHashOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE + TopN Hash Memory Usage: 0.1 + value expressions: _col1 (type: bigint) + Reducer 3 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: bigint) + outputColumnNames: _col0, _col1 + Select Vectorization: + className: VectorSelectOperator + native: true + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE + Limit + Number of rows: 10 + Limit Vectorization: + className: 
VectorLimitOperator + native: true + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: 10 + Processor Tree: + ListSink + +PREHOOK: query: select s, count(*) from over10k group by s order by s limit 10 +PREHOOK: type: QUERY +PREHOOK: Input: default@over10k +#### A masked pattern was here #### +POSTHOOK: query: select s, count(*) from over10k group by s order by s limit 10 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@over10k +#### A masked pattern was here #### +alice allen 8 +alice brown 14 +alice carson 10 +alice davidson 18 +alice ellison 15 +alice falkner 17 +alice garcia 13 +alice hernandez 18 +alice ichabod 22 +alice johnson 12 +PREHOOK: query: explain vectorization operator +select ts, count(ts) from over10k group by ts order by ts limit 10 +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization operator +select ts, count(ts) from over10k group by ts order by ts limit 10 +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) + Reducer 3 <- Reducer 2 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: over10k + Statistics: Num rows: 1 Data size: 40 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Select Operator + expressions: ts (type: timestamp) + outputColumnNames: ts + Select Vectorization: + className: VectorSelectOperator + native: true + Statistics: Num rows: 1 Data size: 40 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: count(ts) + Group By Vectorization: + className: VectorGroupByHashSerializeKeySingleCountKeyOperator + groupByMode: HASH + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true + vectorProcessingMode: HASH + keys: ts (type: timestamp) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 40 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: timestamp) + sort order: + + Map-reduce partition columns: _col0 (type: timestamp) + Reduce Sink Vectorization: + className: VectorReduceSinkMultiKeyOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 1 Data size: 40 Basic stats: COMPLETE Column stats: NONE + TopN Hash Memory Usage: 0.1 + value expressions: _col1 (type: bigint) + Execution mode: vectorized, llap + LLAP IO: no inputs + Map 
Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] + featureSupportInUse: [] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + Reducer 2 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Operator Tree: + Group By Operator + aggregations: count(VALUE._col0) + Group By Vectorization: + className: VectorGroupByOperator + groupByMode: MERGEPARTIAL + native: false + vectorProcessingMode: MERGE_PARTIAL + keys: KEY._col0 (type: timestamp) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 40 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: timestamp) + sort order: + + Reduce Sink Vectorization: + className: VectorReduceSinkObjectHashOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 1 Data size: 40 Basic stats: COMPLETE Column stats: NONE + TopN Hash Memory Usage: 0.1 + value expressions: _col1 (type: bigint) + Reducer 3 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: timestamp), VALUE._col0 (type: bigint) + outputColumnNames: _col0, _col1 + Select Vectorization: + className: VectorSelectOperator + native: true + Statistics: Num rows: 1 Data size: 40 Basic stats: COMPLETE Column stats: NONE + Limit + Number of rows: 10 + Limit Vectorization: + className: VectorLimitOperator + native: true + Statistics: Num rows: 1 Data size: 40 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 1 Data size: 40 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: 10 + Processor Tree: + ListSink + +PREHOOK: query: select ts, count(ts) from over10k group by ts order by ts limit 10 +PREHOOK: type: QUERY +PREHOOK: Input: default@over10k +#### A masked pattern was here #### +POSTHOOK: query: select ts, count(ts) from over10k group by ts order by ts limit 10 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@over10k +#### A masked pattern was here #### +2013-03-01 09:11:58.70307 26 +2013-03-01 09:11:58.703071 50 +2013-03-01 09:11:58.703072 32 +2013-03-01 09:11:58.703073 42 +2013-03-01 09:11:58.703074 45 +2013-03-01 09:11:58.703075 38 +2013-03-01 09:11:58.703076 45 +2013-03-01 09:11:58.703077 50 +2013-03-01 09:11:58.703078 24 +2013-03-01 
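09:11:58.703079 43

The operator class name encodes both the key category and the COUNT shape: string keys get StringKey variants, single-long keys such as int get LongKey variants, and keys that must be serialized, such as timestamp and decimal, get SerializeKey variants; counting the key itself, another column, or * selects the CountKey, CountColumn, or CountStar suffix, as the surrounding plans show. An illustrative probe, not part of the generated q.out:

-- For a timestamp grouping key, the plans in this diff show:
--   count(ts) -> VectorGroupByHashSerializeKeySingleCountKeyOperator
--   count(d)  -> VectorGroupByHashSerializeKeySingleCountColumnOperator
--   count(*)  -> VectorGroupByHashSerializeKeySingleCountStarOperator
explain vectorization operator
select ts, count(d) from over10k group by ts order by ts limit 10;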
+PREHOOK: query: explain vectorization operator +select ts, count(d) from over10k group by ts order by ts limit 10 +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization operator +select ts, count(d) from over10k group by ts order by ts limit 10 +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) + Reducer 3 <- Reducer 2 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: over10k + Statistics: Num rows: 1 Data size: 48 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Select Operator + expressions: d (type: double), ts (type: timestamp) + outputColumnNames: d, ts + Select Vectorization: + className: VectorSelectOperator + native: true + Statistics: Num rows: 1 Data size: 48 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: count(d) + Group By Vectorization: + className: VectorGroupByHashSerializeKeySingleCountColumnOperator + groupByMode: HASH + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true + vectorProcessingMode: HASH + keys: ts (type: timestamp) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 48 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: timestamp) + sort order: + + Map-reduce partition columns: _col0 (type: timestamp) + Reduce Sink Vectorization: + className: VectorReduceSinkMultiKeyOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 1 Data size: 48 Basic stats: COMPLETE Column stats: NONE + TopN Hash Memory Usage: 0.1 + value expressions: _col1 (type: bigint) + Execution mode: vectorized, llap + LLAP IO: no inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] + featureSupportInUse: [] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + Reducer 2 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Operator Tree: + Group By Operator + aggregations: count(VALUE._col0) + Group By Vectorization: + className: VectorGroupByOperator + groupByMode: MERGEPARTIAL + native: false + vectorProcessingMode: MERGE_PARTIAL + keys: KEY._col0 (type: timestamp) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 48 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key
expressions: _col0 (type: timestamp) + sort order: + + Reduce Sink Vectorization: + className: VectorReduceSinkObjectHashOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 1 Data size: 48 Basic stats: COMPLETE Column stats: NONE + TopN Hash Memory Usage: 0.1 + value expressions: _col1 (type: bigint) + Reducer 3 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: timestamp), VALUE._col0 (type: bigint) + outputColumnNames: _col0, _col1 + Select Vectorization: + className: VectorSelectOperator + native: true + Statistics: Num rows: 1 Data size: 48 Basic stats: COMPLETE Column stats: NONE + Limit + Number of rows: 10 + Limit Vectorization: + className: VectorLimitOperator + native: true + Statistics: Num rows: 1 Data size: 48 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 1 Data size: 48 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: 10 + Processor Tree: + ListSink + +PREHOOK: query: select ts, count(d) from over10k group by ts order by ts limit 10 +PREHOOK: type: QUERY +PREHOOK: Input: default@over10k +#### A masked pattern was here #### +POSTHOOK: query: select ts, count(d) from over10k group by ts order by ts limit 10 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@over10k +#### A masked pattern was here #### +2013-03-01 09:11:58.70307 26 +2013-03-01 09:11:58.703071 50 +2013-03-01 09:11:58.703072 32 +2013-03-01 09:11:58.703073 42 +2013-03-01 09:11:58.703074 45 +2013-03-01 09:11:58.703075 38 +2013-03-01 09:11:58.703076 45 +2013-03-01 09:11:58.703077 50 +2013-03-01 09:11:58.703078 24 +2013-03-01 09:11:58.703079 43 +PREHOOK: query: explain vectorization operator +select ts, count(*) from over10k group by ts order by ts limit 10 +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization operator +select ts, count(*) from over10k group by ts order by ts limit 10 +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) + Reducer 3 <- Reducer 2 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: over10k + Statistics: Num rows: 1 Data size: 40 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Select Operator + expressions: ts (type: timestamp) + outputColumnNames: ts + Select Vectorization: + className: VectorSelectOperator + native: true + Statistics: Num rows: 1 Data size: 40 Basic stats: COMPLETE Column 
stats: NONE + Group By Operator + aggregations: count() + Group By Vectorization: + className: VectorGroupByHashSerializeKeySingleCountStarOperator + groupByMode: HASH + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true + vectorProcessingMode: HASH + keys: ts (type: timestamp) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 40 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: timestamp) + sort order: + + Map-reduce partition columns: _col0 (type: timestamp) + Reduce Sink Vectorization: + className: VectorReduceSinkMultiKeyOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 1 Data size: 40 Basic stats: COMPLETE Column stats: NONE + TopN Hash Memory Usage: 0.1 + value expressions: _col1 (type: bigint) + Execution mode: vectorized, llap + LLAP IO: no inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] + featureSupportInUse: [] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + Reducer 2 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Operator Tree: + Group By Operator + aggregations: count(VALUE._col0) + Group By Vectorization: + className: VectorGroupByOperator + groupByMode: MERGEPARTIAL + native: false + vectorProcessingMode: MERGE_PARTIAL + keys: KEY._col0 (type: timestamp) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 40 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: timestamp) + sort order: + + Reduce Sink Vectorization: + className: VectorReduceSinkObjectHashOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 1 Data size: 40 Basic stats: COMPLETE Column stats: NONE + TopN Hash Memory Usage: 0.1 + value expressions: _col1 (type: bigint) + Reducer 3 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: timestamp), VALUE._col0 (type: bigint) + outputColumnNames: _col0, _col1 + Select Vectorization: + className: VectorSelectOperator + native: true + Statistics: Num rows: 1 Data size: 40 Basic stats: COMPLETE 
Column stats: NONE + Limit + Number of rows: 10 + Limit Vectorization: + className: VectorLimitOperator + native: true + Statistics: Num rows: 1 Data size: 40 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 1 Data size: 40 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: 10 + Processor Tree: + ListSink + +PREHOOK: query: select ts, count(*) from over10k group by ts order by ts limit 10 +PREHOOK: type: QUERY +PREHOOK: Input: default@over10k +#### A masked pattern was here #### +POSTHOOK: query: select ts, count(*) from over10k group by ts order by ts limit 10 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@over10k +#### A masked pattern was here #### +2013-03-01 09:11:58.70307 26 +2013-03-01 09:11:58.703071 50 +2013-03-01 09:11:58.703072 32 +2013-03-01 09:11:58.703073 42 +2013-03-01 09:11:58.703074 45 +2013-03-01 09:11:58.703075 38 +2013-03-01 09:11:58.703076 45 +2013-03-01 09:11:58.703077 50 +2013-03-01 09:11:58.703078 24 +2013-03-01 09:11:58.703079 43 +PREHOOK: query: explain vectorization operator +select `dec`, count(`dec`) from over10k group by `dec` order by `dec` limit 10 +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization operator +select `dec`, count(`dec`) from over10k group by `dec` order by `dec` limit 10 +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) + Reducer 3 <- Reducer 2 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: over10k + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Select Operator + expressions: dec (type: decimal(4,2)) + outputColumnNames: dec + Select Vectorization: + className: VectorSelectOperator + native: true + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: count(dec) + Group By Vectorization: + className: VectorGroupByHashSerializeKeySingleCountKeyOperator + groupByMode: HASH + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true + vectorProcessingMode: HASH + keys: dec (type: decimal(4,2)) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: decimal(4,2)) + sort order: + + Map-reduce partition columns: _col0 (type: decimal(4,2)) + Reduce Sink Vectorization: + className: VectorReduceSinkMultiKeyOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys 
IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + TopN Hash Memory Usage: 0.1 + value expressions: _col1 (type: bigint) + Execution mode: vectorized, llap + LLAP IO: no inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] + featureSupportInUse: [] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + Reducer 2 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Operator Tree: + Group By Operator + aggregations: count(VALUE._col0) + Group By Vectorization: + className: VectorGroupByOperator + groupByMode: MERGEPARTIAL + native: false + vectorProcessingMode: MERGE_PARTIAL + keys: KEY._col0 (type: decimal(4,2)) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: decimal(4,2)) + sort order: + + Reduce Sink Vectorization: + className: VectorReduceSinkObjectHashOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + TopN Hash Memory Usage: 0.1 + value expressions: _col1 (type: bigint) + Reducer 3 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: decimal(4,2)), VALUE._col0 (type: bigint) + outputColumnNames: _col0, _col1 + Select Vectorization: + className: VectorSelectOperator + native: true + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Limit + Number of rows: 10 + Limit Vectorization: + className: VectorLimitOperator + native: true + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: 10 + Processor Tree: + ListSink + +PREHOOK: query: select `dec`, count(`dec`) from over10k group by `dec` order by `dec` limit 10 +PREHOOK: type: QUERY +PREHOOK: Input: default@over10k +#### A masked pattern was here #### +POSTHOOK: query: select `dec`, count(`dec`) from over10k group by `dec` order by `dec` limit 10 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@over10k +#### A masked pattern was here 
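#### +0.01 2 +0.02 1 +0.03 2 +0.04 1 +0.05 1 +0.06 3 +0.07 1 +0.08 3 +0.10 1 +0.11 1

The decimal(4,2) key takes the same SerializeKey route as the timestamp key above. Note the Single Key Column entry in each nativeConditionsMet list: a hypothetical two-key variant, sketched here and not part of the generated q.out, would presumably fail that condition and fall back to the non-native vectorized GroupBy:

-- Hypothetical counter-example: two grouping keys break the
-- "Single Key Column" condition, so the native operator should not apply.
explain vectorization operator
select `dec`, bin, count(*) from over10k group by `dec`, bin;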
+PREHOOK: query: explain vectorization operator +select `dec`, count(bin) from over10k group by `dec` order by `dec` limit 10 +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization operator +select `dec`, count(bin) from over10k group by `dec` order by `dec` limit 10 +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) + Reducer 3 <- Reducer 2 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: over10k + Statistics: Num rows: 1 Data size: 256 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Select Operator + expressions: dec (type: decimal(4,2)), bin (type: binary) + outputColumnNames: dec, bin + Select Vectorization: + className: VectorSelectOperator + native: true + Statistics: Num rows: 1 Data size: 256 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: count(bin) + Group By Vectorization: + className: VectorGroupByHashSerializeKeySingleCountColumnOperator + groupByMode: HASH + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true + vectorProcessingMode: HASH + keys: dec (type: decimal(4,2)) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 256 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: decimal(4,2)) + sort order: + + Map-reduce partition columns: _col0 (type: decimal(4,2)) + Reduce Sink Vectorization: + className: VectorReduceSinkMultiKeyOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 1 Data size: 256 Basic stats: COMPLETE Column stats: NONE + TopN Hash Memory Usage: 0.1 + value expressions: _col1 (type: bigint) + Execution mode: vectorized, llap + LLAP IO: no inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] + featureSupportInUse: [] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + Reducer 2 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Operator Tree: + Group By Operator + aggregations: count(VALUE._col0) + Group By Vectorization: + className: VectorGroupByOperator + groupByMode: MERGEPARTIAL + native: false + vectorProcessingMode: MERGE_PARTIAL + keys: KEY._col0 (type: decimal(4,2)) + mode: mergepartial + outputColumnNames: _col0, _col1 +
Statistics: Num rows: 1 Data size: 256 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: decimal(4,2)) + sort order: + + Reduce Sink Vectorization: + className: VectorReduceSinkObjectHashOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 1 Data size: 256 Basic stats: COMPLETE Column stats: NONE + TopN Hash Memory Usage: 0.1 + value expressions: _col1 (type: bigint) + Reducer 3 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: decimal(4,2)), VALUE._col0 (type: bigint) + outputColumnNames: _col0, _col1 + Select Vectorization: + className: VectorSelectOperator + native: true + Statistics: Num rows: 1 Data size: 256 Basic stats: COMPLETE Column stats: NONE + Limit + Number of rows: 10 + Limit Vectorization: + className: VectorLimitOperator + native: true + Statistics: Num rows: 1 Data size: 256 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 1 Data size: 256 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: 10 + Processor Tree: + ListSink + +PREHOOK: query: select `dec`, count(bin) from over10k group by `dec` order by `dec` limit 10 +PREHOOK: type: QUERY +PREHOOK: Input: default@over10k +#### A masked pattern was here #### +POSTHOOK: query: select `dec`, count(bin) from over10k group by `dec` order by `dec` limit 10 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@over10k +#### A masked pattern was here #### +0.01 2 +0.02 1 +0.03 2 +0.04 1 +0.05 1 +0.06 3 +0.07 1 +0.08 3 +0.10 1 +0.11 1 +PREHOOK: query: explain vectorization operator +select `dec`, count(*) from over10k group by `dec` order by `dec` limit 10 +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization operator +select `dec`, count(*) from over10k group by `dec` order by `dec` limit 10 +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) + Reducer 3 <- Reducer 2 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: over10k + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Select Operator + expressions: dec (type: decimal(4,2)) + outputColumnNames: dec + Select Vectorization: + className: VectorSelectOperator + native: true + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: count() + Group 
By Vectorization: + className: VectorGroupByHashSerializeKeySingleCountStarOperator + groupByMode: HASH + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true + vectorProcessingMode: HASH + keys: dec (type: decimal(4,2)) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: decimal(4,2)) + sort order: + + Map-reduce partition columns: _col0 (type: decimal(4,2)) + Reduce Sink Vectorization: + className: VectorReduceSinkMultiKeyOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + TopN Hash Memory Usage: 0.1 + value expressions: _col1 (type: bigint) + Execution mode: vectorized, llap + LLAP IO: no inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] + featureSupportInUse: [] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + Reducer 2 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Operator Tree: + Group By Operator + aggregations: count(VALUE._col0) + Group By Vectorization: + className: VectorGroupByOperator + groupByMode: MERGEPARTIAL + native: false + vectorProcessingMode: MERGE_PARTIAL + keys: KEY._col0 (type: decimal(4,2)) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: decimal(4,2)) + sort order: + + Reduce Sink Vectorization: + className: VectorReduceSinkObjectHashOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + TopN Hash Memory Usage: 0.1 + value expressions: _col1 (type: bigint) + Reducer 3 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: decimal(4,2)), VALUE._col0 (type: bigint) + outputColumnNames: _col0, _col1 + Select Vectorization: + className: VectorSelectOperator + native: true + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Limit + Number of 
rows: 10 + Limit Vectorization: + className: VectorLimitOperator + native: true + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: 10 + Processor Tree: + ListSink + +PREHOOK: query: select `dec`, count(*) from over10k group by `dec` order by `dec` limit 10 +PREHOOK: type: QUERY +PREHOOK: Input: default@over10k +#### A masked pattern was here #### +POSTHOOK: query: select `dec`, count(*) from over10k group by `dec` order by `dec` limit 10 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@over10k +#### A masked pattern was here #### +0.01 2 +0.02 1 +0.03 2 +0.04 1 +0.05 1 +0.06 3 +0.07 1 +0.08 3 +0.10 1 +0.11 1 +PREHOOK: query: explain vectorization operator +select i, count(i) from over10k group by i order by i limit 10 +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization operator +select i, count(i) from over10k group by i order by i limit 10 +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) + Reducer 3 <- Reducer 2 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: over10k + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Select Operator + expressions: i (type: int) + outputColumnNames: i + Select Vectorization: + className: VectorSelectOperator + native: true + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: count(i) + Group By Vectorization: + className: VectorGroupByHashLongKeySingleCountKeyOperator + groupByMode: HASH + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true + vectorProcessingMode: HASH + keys: i (type: int) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + TopN Hash Memory Usage: 0.1 + value expressions: _col1 (type: bigint) + Execution mode: vectorized, llap + LLAP IO: no inputs + Map Vectorization: + enabled: true + enabledConditionsMet: 
hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] + featureSupportInUse: [] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + Reducer 2 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Operator Tree: + Group By Operator + aggregations: count(VALUE._col0) + Group By Vectorization: + className: VectorGroupByOperator + groupByMode: MERGEPARTIAL + native: false + vectorProcessingMode: MERGE_PARTIAL + keys: KEY._col0 (type: int) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Reduce Sink Vectorization: + className: VectorReduceSinkObjectHashOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + TopN Hash Memory Usage: 0.1 + value expressions: _col1 (type: bigint) + Reducer 3 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: bigint) + outputColumnNames: _col0, _col1 + Select Vectorization: + className: VectorSelectOperator + native: true + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Limit + Number of rows: 10 + Limit Vectorization: + className: VectorLimitOperator + native: true + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: 10 + Processor Tree: + ListSink + +PREHOOK: query: select i, count(i) from over10k group by i order by i limit 10 +PREHOOK: type: QUERY +PREHOOK: Input: default@over10k +#### A masked pattern was here #### +POSTHOOK: query: select i, count(i) from over10k group by i order by i limit 10 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@over10k +#### A masked pattern was here #### +65536 45 +65537 35 +65538 29 +65539 24 +65540 29 +65541 43 +65542 37 +65543 40 +65544 42 +65545 39 +PREHOOK: query: explain vectorization operator +select i, count(b) from over10k group by i order by i limit 10 +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization operator +select i, count(b) from over10k group by i order by i limit 10 +POSTHOOK: type: QUERY +PLAN 
VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) + Reducer 3 <- Reducer 2 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: over10k + Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Select Operator + expressions: i (type: int), b (type: bigint) + outputColumnNames: i, b + Select Vectorization: + className: VectorSelectOperator + native: true + Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: count(b) + Group By Vectorization: + className: VectorGroupByHashLongKeySingleCountColumnOperator + groupByMode: HASH + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true + vectorProcessingMode: HASH + keys: i (type: int) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: NONE + TopN Hash Memory Usage: 0.1 + value expressions: _col1 (type: bigint) + Execution mode: vectorized, llap + LLAP IO: no inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] + featureSupportInUse: [] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + Reducer 2 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Operator Tree: + Group By Operator + aggregations: count(VALUE._col0) + Group By Vectorization: + className: VectorGroupByOperator + groupByMode: MERGEPARTIAL + native: false + vectorProcessingMode: MERGE_PARTIAL + keys: KEY._col0 (type: int) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Reduce Sink Vectorization: + className: VectorReduceSinkObjectHashOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys 
IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: NONE + TopN Hash Memory Usage: 0.1 + value expressions: _col1 (type: bigint) + Reducer 3 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: bigint) + outputColumnNames: _col0, _col1 + Select Vectorization: + className: VectorSelectOperator + native: true + Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: NONE + Limit + Number of rows: 10 + Limit Vectorization: + className: VectorLimitOperator + native: true + Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: 10 + Processor Tree: + ListSink + +PREHOOK: query: select i, count(b) from over10k group by i order by i limit 10 +PREHOOK: type: QUERY +PREHOOK: Input: default@over10k +#### A masked pattern was here #### +POSTHOOK: query: select i, count(b) from over10k group by i order by i limit 10 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@over10k +#### A masked pattern was here #### +65536 45 +65537 35 +65538 29 +65539 24 +65540 29 +65541 43 +65542 37 +65543 40 +65544 42 +65545 39 +PREHOOK: query: explain vectorization operator +select i, count(*) from over10k group by i order by i limit 10 +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization operator +select i, count(*) from over10k group by i order by i limit 10 +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) + Reducer 3 <- Reducer 2 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: over10k + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Select Operator + expressions: i (type: int) + outputColumnNames: i + Select Vectorization: + className: VectorSelectOperator + native: true + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: count() + Group By Vectorization: + className: VectorGroupByHashLongKeySingleCountStarOperator + groupByMode: HASH + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true + vectorProcessingMode: HASH + keys: i (type: int) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 4 Basic stats: 
COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + TopN Hash Memory Usage: 0.1 + value expressions: _col1 (type: bigint) + Execution mode: vectorized, llap + LLAP IO: no inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] + featureSupportInUse: [] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + Reducer 2 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Operator Tree: + Group By Operator + aggregations: count(VALUE._col0) + Group By Vectorization: + className: VectorGroupByOperator + groupByMode: MERGEPARTIAL + native: false + vectorProcessingMode: MERGE_PARTIAL + keys: KEY._col0 (type: int) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Reduce Sink Vectorization: + className: VectorReduceSinkObjectHashOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + TopN Hash Memory Usage: 0.1 + value expressions: _col1 (type: bigint) + Reducer 3 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: bigint) + outputColumnNames: _col0, _col1 + Select Vectorization: + className: VectorSelectOperator + native: true + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Limit + Number of rows: 10 + Limit Vectorization: + className: VectorLimitOperator + native: true + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + 
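
The three explain plans above exercise the three single-COUNT specializations added by this patch: count(i) on the grouping key itself maps to VectorGroupByHashLongKeySingleCountKeyOperator, count(b) on a non-key column to VectorGroupByHashLongKeySingleCountColumnOperator, and count(*) to VectorGroupByHashLongKeySingleCountStarOperator. A minimal sketch of that three-way classification, using hypothetical names rather than Hive's planner internals:

public class SingleCountKind {

  enum Kind { COUNT_KEY, COUNT_COLUMN, COUNT_STAR }

  // keyColumn: the single GROUP BY key column;
  // countArg: the COUNT argument, or null for COUNT(*).
  static Kind classify(String keyColumn, String countArg) {
    if (countArg == null) {
      return Kind.COUNT_STAR;               // count(*)   -> ...SingleCountStarOperator
    }
    if (countArg.equals(keyColumn)) {
      return Kind.COUNT_KEY;                // count(key) -> ...SingleCountKeyOperator
    }
    return Kind.COUNT_COLUMN;               // count(col) -> ...SingleCountColumnOperator
  }

  public static void main(String[] args) {
    System.out.println(classify("i", "i"));   // COUNT_KEY,    as in count(i) group by i
    System.out.println(classify("i", "b"));   // COUNT_COLUMN, as in count(b) group by i
    System.out.println(classify("i", null));  // COUNT_STAR,   as in count(*) group by i
  }
}

The split plausibly exists because count(*) counts every row, count(col) must skip rows where the counted column is NULL, and count(key) can reuse the NULL handling already done while hashing the key.
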
limit: 10 + Processor Tree: + ListSink + +PREHOOK: query: select i, count(*) from over10k group by i order by i limit 10 +PREHOOK: type: QUERY +PREHOOK: Input: default@over10k +#### A masked pattern was here #### +POSTHOOK: query: select i, count(*) from over10k group by i order by i limit 10 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@over10k +#### A masked pattern was here #### +65536 45 +65537 35 +65538 29 +65539 24 +65540 29 +65541 43 +65542 37 +65543 40 +65544 42 +65545 39 diff --git ql/src/test/results/clientpositive/llap/vector_data_types.q.out ql/src/test/results/clientpositive/llap/vector_data_types.q.out index 8dd959e..79f937b 100644 --- ql/src/test/results/clientpositive/llap/vector_data_types.q.out +++ ql/src/test/results/clientpositive/llap/vector_data_types.q.out @@ -381,6 +381,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] mode: hash diff --git ql/src/test/results/clientpositive/llap/vector_decimal_aggregate.q.out ql/src/test/results/clientpositive/llap/vector_decimal_aggregate.q.out index 902d137..0df1dc6 100644 --- ql/src/test/results/clientpositive/llap/vector_decimal_aggregate.q.out +++ ql/src/test/results/clientpositive/llap/vector_decimal_aggregate.q.out @@ -88,6 +88,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 3:int native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8] keys: cint (type: int) @@ -268,6 +270,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 3:int native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12] keys: _col0 (type: int) @@ -482,6 +486,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 3:int native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8] keys: cint (type: int) @@ -682,6 +688,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 3:int native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 
12] keys: _col0 (type: int) diff --git ql/src/test/results/clientpositive/llap/vector_decimal_precision.q.out ql/src/test/results/clientpositive/llap/vector_decimal_precision.q.out index 50e4305..6cc6b2e 100644 --- ql/src/test/results/clientpositive/llap/vector_decimal_precision.q.out +++ ql/src/test/results/clientpositive/llap/vector_decimal_precision.q.out @@ -592,6 +592,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1] mode: hash @@ -1211,6 +1213,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1] mode: hash diff --git ql/src/test/results/clientpositive/llap/vector_decimal_udf.q.out ql/src/test/results/clientpositive/llap/vector_decimal_udf.q.out index c6867f8..ad8020e 100644 --- ql/src/test/results/clientpositive/llap/vector_decimal_udf.q.out +++ ql/src/test/results/clientpositive/llap/vector_decimal_udf.q.out @@ -2304,6 +2304,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 1:int native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1] keys: value (type: int) @@ -3245,6 +3247,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 1:int native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2] keys: _col0 (type: int) @@ -3409,6 +3413,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 1:int native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2] keys: _col0 (type: int) @@ -3655,6 +3661,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] mode: hash @@ -3786,6 +3794,8 @@ STAGE PLANS: className: 
VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] mode: hash @@ -3917,6 +3927,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] mode: hash @@ -6300,6 +6312,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 1:int native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1] keys: value (type: int) @@ -7247,6 +7261,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 1:int native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2] keys: _col0 (type: int) @@ -7412,6 +7428,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 1:int native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2] keys: _col0 (type: int) @@ -7659,6 +7677,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] mode: hash @@ -7791,6 +7811,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] mode: hash @@ -7923,6 +7945,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: 
Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] mode: hash diff --git ql/src/test/results/clientpositive/llap/vector_distinct_2.q.out ql/src/test/results/clientpositive/llap/vector_distinct_2.q.out index 73d04a9..562d3e9 100644 --- ql/src/test/results/clientpositive/llap/vector_distinct_2.q.out +++ ql/src/test/results/clientpositive/llap/vector_distinct_2.q.out @@ -144,6 +144,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 0:tinyint, col 8:string native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: t (type: tinyint), s (type: string) diff --git ql/src/test/results/clientpositive/llap/vector_groupby_3.q.out ql/src/test/results/clientpositive/llap/vector_groupby_3.q.out index 127d8ad..7b775eb 100644 --- ql/src/test/results/clientpositive/llap/vector_groupby_3.q.out +++ ql/src/test/results/clientpositive/llap/vector_groupby_3.q.out @@ -146,6 +146,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 0:tinyint, col 8:string native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] keys: t (type: tinyint), s (type: string) diff --git ql/src/test/results/clientpositive/llap/vector_groupby_cube1.q.out ql/src/test/results/clientpositive/llap/vector_groupby_cube1.q.out index 3bfbda0..4ee1521 100644 --- ql/src/test/results/clientpositive/llap/vector_groupby_cube1.q.out +++ ql/src/test/results/clientpositive/llap/vector_groupby_cube1.q.out @@ -60,6 +60,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 0:string, col 1:string, ConstantVectorExpression(val 0) -> 3:bigint native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false, No Grouping Sets IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] keys: key (type: string), val (type: string), 0L (type: bigint) @@ -198,6 +200,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 0:string, col 1:string, ConstantVectorExpression(val 0) -> 3:bigint native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false, No Grouping Sets IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] keys: key (type: string), val (type: string), 0L (type: bigint) @@ -362,6 +366,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 0:string, col 1:string, ConstantVectorExpression(val 0) -> 3:bigint native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true + 
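
Every hunk in these golden-file updates adds the same pair of lists, nativeConditionsMet and nativeConditionsNotMet. A plausible way to produce them (a minimal sketch under assumed names, not Hive's actual Vectorizer code) is to evaluate each named condition, bucket its description by outcome, and take the native path only when the not-met bucket is empty:

import java.util.ArrayList;
import java.util.List;

public class NativeGroupByCheck {

  /** One named condition and whether it holds for the operator being planned. */
  record Cond(String desc, boolean ok) {}

  /** Buckets each condition into met/notMet, mirroring the two explain lists. */
  static boolean isNativeEligible(List<Cond> conds, List<String> met, List<String> notMet) {
    for (Cond c : conds) {
      (c.ok() ? met : notMet).add(c.desc() + " IS " + c.ok());
    }
    return notMet.isEmpty();
  }

  public static void main(String[] args) {
    // Condition names taken from the annotations above; the values model a
    // GROUP BY with two key columns and a non-COUNT aggregation.
    List<Cond> conds = List.of(
        new Cond("hive.vectorized.execution.groupby.native.enabled", true),
        new Cond("hive.execution.engine tez IN [tez, spark]", true),
        new Cond("Single Key Column", false),
        new Cond("Single COUNT aggregation or Duplicate Reduction", false),
        new Cond("Group By Mode HASH", true),
        new Cond("No Grouping Sets", true));
    List<String> met = new ArrayList<>();
    List<String> notMet = new ArrayList<>();
    System.out.println("native: " + isNativeEligible(conds, met, notMet)); // false
    System.out.println("nativeConditionsMet: " + String.join(", ", met));
    System.out.println("nativeConditionsNotMet: " + String.join(", ", notMet));
  }
}

Listing the failed conditions in the plan, rather than just native: false, makes it possible to see from a .q.out diff alone why a given GroupBy fell back to the row-mode VectorGroupByOperator.
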
nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false, No Grouping Sets IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] keys: _col0 (type: string), _col1 (type: string), 0L (type: bigint) @@ -623,6 +629,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 0:string, col 1:string, ConstantVectorExpression(val 0) -> 3:bigint native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false, No Grouping Sets IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] keys: key (type: string), val (type: string), 0L (type: bigint) @@ -978,6 +986,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 0:string, col 1:string, ConstantVectorExpression(val 0) -> 3:bigint native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false, No Grouping Sets IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] keys: key (type: string), val (type: string), 0L (type: bigint) @@ -1013,6 +1023,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 0:string, col 1:string, ConstantVectorExpression(val 0) -> 5:bigint native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false, No Grouping Sets IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] keys: key (type: string), val (type: string), 0L (type: bigint) diff --git ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id1.q.out ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id1.q.out index 9a2f5d8..17d61cb 100644 --- ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id1.q.out +++ ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id1.q.out @@ -72,6 +72,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 0:string, col 1:string, ConstantVectorExpression(val 0) -> 3:bigint native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true + nativeConditionsNotMet: Single Key Column IS false, No Grouping Sets IS false vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: _col0 (type: string), _col1 (type: string), 0L (type: bigint) @@ -231,6 +233,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 0:string, col 1:string, ConstantVectorExpression(val 0) -> 3:bigint native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true + nativeConditionsNotMet: Single Key Column IS false, No Grouping Sets IS false vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: _col0 (type: string), _col1 (type: string), 0L (type: bigint) @@ -390,6 +394,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 0:string, 
col 1:string, ConstantVectorExpression(val 0) -> 3:bigint native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true + nativeConditionsNotMet: Single Key Column IS false, No Grouping Sets IS false vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: _col0 (type: string), _col1 (type: string), 0L (type: bigint) @@ -543,6 +549,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 0:string, col 1:string, ConstantVectorExpression(val 0) -> 3:bigint native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true + nativeConditionsNotMet: Single Key Column IS false, No Grouping Sets IS false vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: _col0 (type: string), _col1 (type: string), 0L (type: bigint) @@ -696,6 +704,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 0:string, col 1:string, ConstantVectorExpression(val 0) -> 3:bigint native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true + nativeConditionsNotMet: Single Key Column IS false, No Grouping Sets IS false vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: _col0 (type: string), _col1 (type: string), 0L (type: bigint) @@ -856,6 +866,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 0:string, col 1:string, ConstantVectorExpression(val 0) -> 3:bigint native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true + nativeConditionsNotMet: Single Key Column IS false, No Grouping Sets IS false vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: _col0 (type: string), _col1 (type: string), 0L (type: bigint) diff --git ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id2.q.out ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id2.q.out index 6005fb2..7659a54 100644 --- ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id2.q.out +++ ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id2.q.out @@ -75,6 +75,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 0:int, col 1:int, ConstantVectorExpression(val 0) -> 3:bigint native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false, No Grouping Sets IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] keys: _col0 (type: int), _col1 (type: int), 0L (type: bigint) @@ -277,6 +279,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 0:int, col 1:int, ConstantVectorExpression(val 0) -> 3:bigint native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false, No Grouping 
Sets IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] keys: _col0 (type: int), _col1 (type: int), 0L (type: bigint) @@ -489,6 +493,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 0:int, col 1:int, ConstantVectorExpression(val 0) -> 3:bigint native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true + nativeConditionsNotMet: Single Key Column IS false, No Grouping Sets IS false vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: _col0 (type: int), _col1 (type: int), 0L (type: bigint) @@ -603,13 +609,14 @@ STAGE PLANS: Group By Operator aggregations: count() Group By Vectorization: - aggregators: VectorUDAFCountStar(*) -> bigint - className: VectorGroupByOperator + className: VectorGroupByHashLongKeySingleCountStarOperator groupByMode: HASH keyExpressions: col 2:bigint - native: false + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true vectorProcessingMode: HASH projectedOutputColumnNums: [0] + singleCountAggreation: COUNT_STAR keys: _col2 (type: bigint) mode: hash outputColumnNames: _col0, _col1 @@ -796,6 +803,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 0:int, col 1:int, ConstantVectorExpression(val 0) -> 3:bigint native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true + nativeConditionsNotMet: Single Key Column IS false, No Grouping Sets IS false vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: _col0 (type: int), _col1 (type: int), 0L (type: bigint) @@ -910,13 +919,14 @@ STAGE PLANS: Group By Operator aggregations: count() Group By Vectorization: - aggregators: VectorUDAFCountStar(*) -> bigint - className: VectorGroupByOperator + className: VectorGroupByHashLongKeySingleCountStarOperator groupByMode: HASH keyExpressions: col 2:bigint - native: false + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true vectorProcessingMode: HASH projectedOutputColumnNums: [0] + singleCountAggreation: COUNT_STAR keys: _col2 (type: bigint) mode: hash outputColumnNames: _col0, _col1 @@ -1099,6 +1109,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 0:int, col 1:int, ConstantVectorExpression(val 0) -> 3:bigint native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true + nativeConditionsNotMet: Single Key Column IS false, No Grouping Sets IS false vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: _col0 (type: int), _col1 (type: int), 0L (type: bigint) @@ -1427,6 +1439,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 0:int, col 1:int, ConstantVectorExpression(val 0) -> 3:bigint native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS 
true, hive.execution.engine tez IN [tez, spark] IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true + nativeConditionsNotMet: Single Key Column IS false, No Grouping Sets IS false vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: _col0 (type: int), _col1 (type: int), 0L (type: bigint) @@ -1748,6 +1762,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 0:int, col 1:int, ConstantVectorExpression(val 0) -> 3:bigint native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false, No Grouping Sets IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] keys: _col0 (type: int), _col1 (type: int), 0L (type: bigint) @@ -1914,6 +1930,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 0:int, col 1:int, ConstantVectorExpression(val 0) -> 3:bigint native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true + nativeConditionsNotMet: Single Key Column IS false, No Grouping Sets IS false vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: _col0 (type: int), _col1 (type: int), 0L (type: bigint) @@ -1987,13 +2005,14 @@ STAGE PLANS: Group By Operator aggregations: count() Group By Vectorization: - aggregators: VectorUDAFCountStar(*) -> bigint - className: VectorGroupByOperator + className: VectorGroupByHashLongKeySingleCountStarOperator groupByMode: HASH keyExpressions: col 2:bigint - native: false + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true vectorProcessingMode: HASH projectedOutputColumnNums: [0] + singleCountAggreation: COUNT_STAR keys: _col2 (type: bigint) mode: hash outputColumnNames: _col0, _col1 @@ -2132,6 +2151,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 0:int, col 1:int, ConstantVectorExpression(val 0) -> 3:bigint native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true + nativeConditionsNotMet: Single Key Column IS false, No Grouping Sets IS false vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: _col0 (type: int), _col1 (type: int), 0L (type: bigint) diff --git ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id3.q.out ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id3.q.out index d8e6b3f..30d80e9 100644 --- ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id3.q.out +++ ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id3.q.out @@ -82,6 +82,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 0:int, col 1:int, ConstantVectorExpression(val 0) -> 3:bigint native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false, No 
Grouping Sets IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] keys: key (type: int), value (type: int), 0L (type: bigint) @@ -258,6 +260,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 0:int, col 1:int, ConstantVectorExpression(val 0) -> 3:bigint native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false, No Grouping Sets IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] keys: _col0 (type: int), _col1 (type: int), 0L (type: bigint) diff --git ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets1.q.out ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets1.q.out index 3586eae..fe38089 100644 --- ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets1.q.out +++ ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets1.q.out @@ -90,6 +90,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 0:string, col 1:string, ConstantVectorExpression(val 0) -> 4:bigint native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false, No Grouping Sets IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] keys: a (type: string), b (type: string), 0L (type: bigint) @@ -252,6 +254,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 0:string, col 1:string, ConstantVectorExpression(val 0) -> 4:bigint native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false, No Grouping Sets IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] keys: a (type: string), b (type: string), 0L (type: bigint) @@ -414,6 +418,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 0:string, col 1:string, ConstantVectorExpression(val 0) -> 4:bigint native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false, No Grouping Sets IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] keys: a (type: string), b (type: string), 0L (type: bigint) @@ -576,6 +582,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 0:string, col 1:string, ConstantVectorExpression(val 0) -> 4:bigint native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false, No Grouping Sets IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] keys: a (type: string), b (type: string), 0L (type: bigint) @@ -732,6 +740,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 0:string, col 1:string, col 2:string, ConstantVectorExpression(val 0) -> 4:bigint native: false + nativeConditionsMet: 
hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true + nativeConditionsNotMet: Single Key Column IS false, No Grouping Sets IS false vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: a (type: string), b (type: string), c (type: string), 0L (type: bigint) @@ -884,10 +894,11 @@ STAGE PLANS: Statistics: Num rows: 6 Data size: 1104 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: - className: VectorGroupByOperator + className: VectorGroupByHashStringKeyDuplicateReductionOperator groupByMode: HASH keyExpressions: col 0:string - native: false + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: a (type: string) @@ -913,7 +924,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true rowBatchContext: @@ -1024,13 +1035,14 @@ STAGE PLANS: Group By Operator aggregations: count() Group By Vectorization: - aggregators: VectorUDAFCountStar(*) -> bigint - className: VectorGroupByOperator + className: VectorGroupByHashSerializeKeySingleCountStarOperator groupByMode: HASH keyExpressions: col 6:double - native: false + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true vectorProcessingMode: HASH projectedOutputColumnNums: [0] + singleCountAggreation: COUNT_STAR keys: _col0 (type: double) mode: hash outputColumnNames: _col0, _col1 @@ -1055,7 +1067,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true rowBatchContext: diff --git ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets2.q.out ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets2.q.out index b072ffc..160598b 100644 --- ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets2.q.out +++ ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets2.q.out @@ -76,6 +76,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 0:string, col 1:string native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] keys: a (type: string), b (type: string) @@ -257,6 +259,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 0:string, col 1:string native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS 
false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] keys: a (type: string), b (type: string) @@ -661,6 +665,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 0:string, col 1:string native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] keys: _col0 (type: string), _col1 (type: string) diff --git ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets3_dec.q.out ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets3_dec.q.out index 74caa3f..0a169b1 100644 --- ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets3_dec.q.out +++ ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets3_dec.q.out @@ -83,6 +83,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 0:string, col 1:string, ConstantVectorExpression(val 0) -> 4:bigint native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false, No Grouping Sets IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2] keys: a (type: string), b (type: string), 0L (type: bigint) @@ -222,6 +224,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 0:string, col 1:string, ConstantVectorExpression(val 0) -> 4:bigint native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false, No Grouping Sets IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2] keys: a (type: string), b (type: string), 0L (type: bigint) @@ -387,6 +391,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 0:string, col 1:string native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2] keys: a (type: string), b (type: string) diff --git ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets4.q.out ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets4.q.out index b896193..bcaa3e1 100644 --- ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets4.q.out +++ ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets4.q.out @@ -84,6 +84,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 0:string, col 1:string, ConstantVectorExpression(val 0) -> 5:bigint native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false, No Grouping Sets IS false vectorProcessingMode: HASH projectedOutputColumnNums: 
[0] keys: a (type: string), b (type: string), 0L (type: bigint) @@ -328,6 +330,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 0:string, col 1:string, ConstantVectorExpression(val 0) -> 5:bigint native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false, No Grouping Sets IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] keys: a (type: string), b (type: string), 0L (type: bigint) @@ -603,6 +607,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 0:string, col 1:string native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] keys: a (type: string), b (type: string) diff --git ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets5.q.out ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets5.q.out index 8da5735..e530987 100644 --- ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets5.q.out +++ ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets5.q.out @@ -76,6 +76,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 0:string, col 1:string native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: a (type: string), b (type: string) @@ -146,6 +148,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 0:string, col 1:string, ConstantVectorExpression(val 0) -> 2:bigint native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false, No Grouping Sets IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] keys: _col0 (type: string), _col1 (type: string), 0L (type: bigint) @@ -268,6 +272,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 0:string, col 1:string native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: a (type: string), b (type: string) @@ -338,6 +344,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 0:string, col 1:string, ConstantVectorExpression(val 0) -> 2:bigint native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false, No Grouping Sets IS false vectorProcessingMode: HASH 
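
Several of the not-met lists above include No Grouping Sets IS false. Why grouping sets block the single-key native path is visible in the keyExpressions: the key tuple is widened with a constant grouping id (ConstantVectorExpression(val 0) -> bigint), and each input row must be aggregated once per grouping set, so there is never a single key column. A hypothetical sketch of that row expansion (the bit convention here is illustrative and not necessarily Hive's):

import java.util.ArrayList;
import java.util.List;

public class GroupingSetsExpansion {

  /** Expands one input row into one keyed copy per grouping set; null means rolled up. */
  static List<List<Object>> expand(Object[] row, long[] groupingIds, int numKeys) {
    List<List<Object>> out = new ArrayList<>();
    for (long id : groupingIds) {
      List<Object> key = new ArrayList<>();
      for (int k = 0; k < numKeys; k++) {
        // Bit k of the grouping id decides whether key column k is kept or nulled.
        key.add((id & (1L << (numKeys - 1 - k))) == 0 ? row[k] : null);
      }
      key.add(id); // the grouping id rides along as an extra key column
      out.add(key);
    }
    return out;
  }

  public static void main(String[] args) {
    // CUBE(a, b): four grouping sets, ids 0..3 over the two key columns.
    for (List<Object> key : expand(new Object[]{"a1", "b1"}, new long[]{0, 1, 2, 3}, 2)) {
      System.out.println(key); // [a1, b1, 0], [a1, null, 1], [null, b1, 2], [null, null, 3]
    }
  }
}

One row in, four aggregation keys out: a native operator specialized for a single long or string key has no way to represent that multi-key fan-out, which is why these operators keep native: false.
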
 projectedOutputColumnNums: [0]
 keys: _col0 (type: string), _col1 (type: string), 0L (type: bigint)
@@ -487,6 +495,8 @@ STAGE PLANS:
 groupByMode: HASH
 keyExpressions: col 0:string, col 1:string
 native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: Single Key Column IS false
 vectorProcessingMode: HASH
 projectedOutputColumnNums: []
 keys: a (type: string), b (type: string)
@@ -557,6 +567,8 @@ STAGE PLANS:
 groupByMode: HASH
 keyExpressions: col 0:string, col 1:string
 native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false
 vectorProcessingMode: HASH
 projectedOutputColumnNums: [0]
 keys: _col0 (type: string), _col1 (type: string)
diff --git ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets6.q.out ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets6.q.out
index 6c4ae65..c8a764b 100644
--- ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets6.q.out
+++ ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets6.q.out
@@ -76,6 +76,8 @@ STAGE PLANS:
 groupByMode: HASH
 keyExpressions: col 0:string, col 1:string, ConstantVectorExpression(val 0) -> 5:bigint
 native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true
+ nativeConditionsNotMet: Single Key Column IS false, No Grouping Sets IS false
 vectorProcessingMode: HASH
 projectedOutputColumnNums: []
 keys: a (type: string), b (type: string), 0L (type: bigint)
@@ -219,6 +221,8 @@ STAGE PLANS:
 groupByMode: HASH
 keyExpressions: col 0:string, col 1:string, ConstantVectorExpression(val 0) -> 5:bigint
 native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true
+ nativeConditionsNotMet: Single Key Column IS false, No Grouping Sets IS false
 vectorProcessingMode: HASH
 projectedOutputColumnNums: []
 keys: a (type: string), b (type: string), 0L (type: bigint)
diff --git ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets_grouping.q.out ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets_grouping.q.out
index 80e073b..7f02670 100644
--- ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets_grouping.q.out
+++ ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets_grouping.q.out
@@ -76,6 +76,8 @@ STAGE PLANS:
 groupByMode: HASH
 keyExpressions: col 0:int, col 1:int, ConstantVectorExpression(val 0) -> 3:bigint
 native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true
+ nativeConditionsNotMet: Single Key Column IS false, No Grouping Sets IS false
 vectorProcessingMode: HASH
 projectedOutputColumnNums: []
 keys: _col0 (type: int), _col1 (type: int), 0L (type: bigint)
@@ -237,6 +239,8 @@ STAGE PLANS:
 groupByMode: HASH
 keyExpressions: col 0:int, col 1:int, ConstantVectorExpression(val 0) -> 3:bigint
 native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true
+ nativeConditionsNotMet: Single Key Column IS false, No Grouping Sets IS false
 vectorProcessingMode: HASH
 projectedOutputColumnNums: []
 keys: _col0 (type: int), _col1 (type: int), 0L (type: bigint)
@@ -405,6 +409,8 @@ STAGE PLANS:
 groupByMode: HASH
 keyExpressions: col 0:int, col 1:int, ConstantVectorExpression(val 0) -> 3:bigint
 native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true
+ nativeConditionsNotMet: Single Key Column IS false, No Grouping Sets IS false
 vectorProcessingMode: HASH
 projectedOutputColumnNums: []
 keys: _col0 (type: int), _col1 (type: int), 0L (type: bigint)
@@ -574,6 +580,8 @@ STAGE PLANS:
 groupByMode: HASH
 keyExpressions: col 0:int, col 1:int, ConstantVectorExpression(val 0) -> 3:bigint
 native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true
+ nativeConditionsNotMet: Single Key Column IS false, No Grouping Sets IS false
 vectorProcessingMode: HASH
 projectedOutputColumnNums: []
 keys: _col0 (type: int), _col1 (type: int), 0L (type: bigint)
@@ -780,6 +788,8 @@ STAGE PLANS:
 groupByMode: HASH
 keyExpressions: col 0:int, col 1:int, ConstantVectorExpression(val 0) -> 3:bigint
 native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true
+ nativeConditionsNotMet: Single Key Column IS false, No Grouping Sets IS false
 vectorProcessingMode: HASH
 projectedOutputColumnNums: []
 keys: key (type: int), value (type: int), 0L (type: bigint)
@@ -941,6 +951,8 @@ STAGE PLANS:
 groupByMode: HASH
 keyExpressions: col 0:int, col 1:int, ConstantVectorExpression(val 0) -> 3:bigint
 native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true
+ nativeConditionsNotMet: Single Key Column IS false, No Grouping Sets IS false
 vectorProcessingMode: HASH
 projectedOutputColumnNums: []
 keys: key (type: int), value (type: int), 0L (type: bigint)
@@ -1109,6 +1121,8 @@ STAGE PLANS:
 groupByMode: HASH
 keyExpressions: col 0:int, col 1:int, ConstantVectorExpression(val 0) -> 3:bigint
 native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true
+ nativeConditionsNotMet: Single Key Column IS false, No Grouping Sets IS false
 vectorProcessingMode: HASH
 projectedOutputColumnNums: []
 keys: key (type: int), value (type: int), 0L (type: bigint)
@@ -1271,6 +1285,8 @@ STAGE PLANS:
 groupByMode: HASH
 keyExpressions: col 0:int, col 1:int, ConstantVectorExpression(val 0) -> 3:bigint
 native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true
+ nativeConditionsNotMet: Single Key Column IS false, No Grouping Sets IS false
 vectorProcessingMode: HASH
 projectedOutputColumnNums: []
 keys: key (type: int), value (type: int), 0L (type: bigint)
@@ -1478,6 +1494,8 @@ STAGE PLANS:
 groupByMode: HASH
 keyExpressions: col 0:int, col 1:int
 native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: Single Key Column IS false
 vectorProcessingMode: HASH
 projectedOutputColumnNums: []
 keys: key (type: int), value (type: int)
@@ -1634,6 +1652,8 @@ STAGE PLANS:
 groupByMode: HASH
 keyExpressions: col 0:int, col 1:int
 native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: Single Key Column IS false
 vectorProcessingMode: HASH
 projectedOutputColumnNums: []
 keys: key (type: int), value (type: int)
@@ -1792,6 +1812,8 @@ STAGE PLANS:
 groupByMode: HASH
 keyExpressions: col 0:int, col 1:int
 native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: Single Key Column IS false
 vectorProcessingMode: HASH
 projectedOutputColumnNums: []
 keys: key (type: int), value (type: int)
@@ -1941,6 +1963,8 @@ STAGE PLANS:
 groupByMode: HASH
 keyExpressions: col 0:int, col 1:int, ConstantVectorExpression(val 0) -> 3:bigint
 native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true
+ nativeConditionsNotMet: Single Key Column IS false, No Grouping Sets IS false
 vectorProcessingMode: HASH
 projectedOutputColumnNums: []
 keys: key (type: int), value (type: int), 0L (type: bigint)
@@ -2107,6 +2131,8 @@ STAGE PLANS:
 groupByMode: HASH
 keyExpressions: col 0:int, col 1:int, ConstantVectorExpression(val 0) -> 3:bigint
 native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true
+ nativeConditionsNotMet: Single Key Column IS false, No Grouping Sets IS false
 vectorProcessingMode: HASH
 projectedOutputColumnNums: []
 keys: key (type: int), value (type: int), 0L (type: bigint)
@@ -2273,6 +2299,8 @@ STAGE PLANS:
 groupByMode: HASH
 keyExpressions: col 0:int, col 1:int, ConstantVectorExpression(val 0) -> 3:bigint
 native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true
+ nativeConditionsNotMet: Single Key Column IS false, No Grouping Sets IS false
 vectorProcessingMode: HASH
 projectedOutputColumnNums: []
 keys: key (type: int), value (type: int), 0L (type: bigint)
@@ -2434,6 +2462,8 @@ STAGE PLANS:
 groupByMode: HASH
 keyExpressions: col 0:int, col 1:int, ConstantVectorExpression(val 0) -> 3:bigint
 native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true
+ nativeConditionsNotMet: Single Key Column IS false, No Grouping Sets IS false
 vectorProcessingMode: HASH
 projectedOutputColumnNums: []
 keys: key (type: int), value (type: int), 0L (type: bigint)
diff --git ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets_limit.q.out ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets_limit.q.out
index e67bca7..f6909f8 100644
--- ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets_limit.q.out
+++ ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets_limit.q.out
@@ -76,6 +76,8 @@ STAGE PLANS:
 groupByMode: HASH
 keyExpressions: col 0:string, col 1:string, ConstantVectorExpression(val 0) -> 4:bigint
 native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true
+ nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false, No Grouping Sets IS false
 vectorProcessingMode: HASH
 projectedOutputColumnNums: [0]
 keys: a (type: string), b (type: string), 0L (type: bigint)
@@ -277,6 +279,8 @@ STAGE PLANS:
 groupByMode: HASH
 keyExpressions: col 0:string, col 1:string, ConstantVectorExpression(val 0) -> 4:bigint
 native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true
+ nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false, No Grouping Sets IS false
 vectorProcessingMode: HASH
 projectedOutputColumnNums: [0]
 keys: a (type: string), b (type: string), 0L (type: bigint)
@@ -478,6 +482,8 @@ STAGE PLANS:
 groupByMode: HASH
 keyExpressions: col 0:string, col 1:string, ConstantVectorExpression(val 0) -> 4:bigint
 native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true
+ nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false, No Grouping Sets IS false
 vectorProcessingMode: HASH
 projectedOutputColumnNums: [0]
 keys: a (type: string), b (type: string), 0L (type: bigint)
@@ -677,6 +683,8 @@ STAGE PLANS:
 groupByMode: HASH
 keyExpressions: col 0:string, col 1:string, col 2:string, ConstantVectorExpression(val 0) -> 4:bigint
 native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true
+ nativeConditionsNotMet: Single Key Column IS false, No Grouping Sets IS false
 vectorProcessingMode: HASH
 projectedOutputColumnNums: []
 keys: a (type: string), b (type: string), c (type: string), 0L (type: bigint)
@@ -868,10 +876,11 @@ STAGE PLANS:
 Statistics: Num rows: 6 Data size: 1104 Basic stats: COMPLETE Column stats: NONE
 Group By Operator
 Group By Vectorization:
- className: VectorGroupByOperator
+ className: VectorGroupByHashStringKeyDuplicateReductionOperator
 groupByMode: HASH
 keyExpressions: col 0:string
- native: false
+ native: true
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
 vectorProcessingMode: HASH
 projectedOutputColumnNums: []
 keys: a (type: string)
@@ -898,7 +907,7 @@ STAGE PLANS:
 inputFormatFeatureSupport: []
 featureSupportInUse: []
 inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
- allNative: false
+ allNative: true
 usesVectorUDFAdaptor: false
 vectorized: true
 rowBatchContext:
@@ -1051,13 +1060,14 @@ STAGE PLANS:
 Group By Operator
 aggregations: count()
 Group By Vectorization:
- aggregators: VectorUDAFCountStar(*) -> bigint
- className: VectorGroupByOperator
+ className: VectorGroupByHashSerializeKeySingleCountStarOperator
 groupByMode: HASH
 keyExpressions: col 6:double
- native: false
+ native: true
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
 vectorProcessingMode: HASH
 projectedOutputColumnNums: [0]
+ singleCountAggregation: COUNT_STAR
 keys: _col0 (type: double)
 mode: hash
 outputColumnNames: _col0, _col1
@@ -1083,7 +1093,7 @@ STAGE PLANS:
 inputFormatFeatureSupport: []
 featureSupportInUse: []
 inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
- allNative: false
+ allNative: true
 usesVectorUDFAdaptor: false
 vectorized: true
 rowBatchContext:
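
Note on the plan changes above: every updated Group By Vectorization block reports the same six checks in its nativeConditionsMet / nativeConditionsNotMet lists: the hive.vectorized.execution.groupby.native.enabled flag, an execution engine in [tez, spark], a single key column, a single COUNT aggregation or duplicate reduction, HASH group-by mode, and no grouping sets. A minimal sketch of how such a gate could be assembled is below; the class and method names are hypothetical illustrations, not the Vectorizer code in this patch:

    import java.util.ArrayList;
    import java.util.List;

    // Hypothetical illustration of the six-way gate reported in the EXPLAIN output.
    final class NativeGroupByGate {
      static boolean evaluate(boolean nativeEnabled, String engine, int keyColumns,
          boolean singleCountOrDupReduction, boolean hashMode, boolean hasGroupingSets,
          List<String> met, List<String> notMet) {
        record(nativeEnabled, "hive.vectorized.execution.groupby.native.enabled", met, notMet);
        record("tez".equals(engine) || "spark".equals(engine),
            "hive.execution.engine " + engine + " IN [tez, spark]", met, notMet);
        record(keyColumns == 1, "Single Key Column", met, notMet);
        record(singleCountOrDupReduction, "Single COUNT aggregation or Duplicate Reduction", met, notMet);
        record(hashMode, "Group By Mode HASH", met, notMet);
        record(!hasGroupingSets, "No Grouping Sets", met, notMet);
        return notMet.isEmpty(); // native: true only when every condition holds
      }

      // Append "<name> IS true" to met or "<name> IS false" to notMet.
      private static void record(boolean ok, String name, List<String> met, List<String> notMet) {
        (ok ? met : notMet).add(name + (ok ? " IS true" : " IS false"));
      }

      public static void main(String[] args) {
        List<String> met = new ArrayList<>(), notMet = new ArrayList<>();
        // A two-key hash GROUP BY on tez, like several plans above.
        boolean isNative = evaluate(true, "tez", 2, true, true, false, met, notMet);
        System.out.println("native: " + isNative); // native: false
        System.out.println("nativeConditionsMet: " + String.join(", ", met));
        System.out.println("nativeConditionsNotMet: " + String.join(", ", notMet));
      }
    }

Under that reading, the q.out deltas are mechanical: operators that pass all six checks flip to native: true and a specialized class name, while everything else only gains the two explanatory lists.
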
diff --git ql/src/test/results/clientpositive/llap/vector_groupby_grouping_window.q.out ql/src/test/results/clientpositive/llap/vector_groupby_grouping_window.q.out
index dc3363d..c1100ba 100644
--- ql/src/test/results/clientpositive/llap/vector_groupby_grouping_window.q.out
+++ ql/src/test/results/clientpositive/llap/vector_groupby_grouping_window.q.out
@@ -74,6 +74,8 @@ STAGE PLANS:
 groupByMode: HASH
 keyExpressions: col 0:int, ConstantVectorExpression(val 0) -> 4:bigint
 native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true
+ nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false, No Grouping Sets IS false
 vectorProcessingMode: HASH
 projectedOutputColumnNums: [0, 1]
 keys: category (type: int), 0L (type: bigint)
diff --git ql/src/test/results/clientpositive/llap/vector_groupby_mapjoin.q.out ql/src/test/results/clientpositive/llap/vector_groupby_mapjoin.q.out
index 98e6e54..b823929 100644
--- ql/src/test/results/clientpositive/llap/vector_groupby_mapjoin.q.out
+++ ql/src/test/results/clientpositive/llap/vector_groupby_mapjoin.q.out
@@ -89,6 +89,8 @@ STAGE PLANS:
 className: VectorGroupByOperator
 groupByMode: HASH
 native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false
 vectorProcessingMode: HASH
 projectedOutputColumnNums: [0, 1]
 mode: hash
@@ -104,10 +106,11 @@ STAGE PLANS:
 value expressions: _col0 (type: bigint), _col1 (type: bigint)
 Group By Operator
 Group By Vectorization:
- className: VectorGroupByOperator
+ className: VectorGroupByHashStringKeyDuplicateReductionOperator
 groupByMode: HASH
 keyExpressions: col 0:string
- native: false
+ native: true
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
 vectorProcessingMode: HASH
 projectedOutputColumnNums: []
 keys: key (type: string)
diff --git ql/src/test/results/clientpositive/llap/vector_groupby_reduce.q.out ql/src/test/results/clientpositive/llap/vector_groupby_reduce.q.out
index d90ebf0..4417056 100644
--- ql/src/test/results/clientpositive/llap/vector_groupby_reduce.q.out
+++ ql/src/test/results/clientpositive/llap/vector_groupby_reduce.q.out
@@ -268,10 +268,11 @@ STAGE PLANS:
 Statistics: Num rows: 1000 Data size: 4000 Basic stats: COMPLETE Column stats: COMPLETE
 Group By Operator
 Group By Vectorization:
- className: VectorGroupByOperator
+ className: VectorGroupByHashLongKeyDuplicateReductionOperator
 groupByMode: HASH
 keyExpressions: col 9:int
- native: false
+ native: true
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
 vectorProcessingMode: HASH
 projectedOutputColumnNums: []
 keys: ss_ticket_number (type: int)
@@ -296,7 +297,7 @@ STAGE PLANS:
 inputFormatFeatureSupport: []
 featureSupportInUse: []
 inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
- allNative: false
+ allNative: true
 usesVectorUDFAdaptor: false
 vectorized: true
 Reducer 2
@@ -467,10 +468,11 @@ STAGE PLANS:
 Statistics: Num rows: 1000 Data size: 4000 Basic stats: COMPLETE Column stats: COMPLETE
 Group By Operator
 Group By Vectorization:
- className: VectorGroupByOperator
+ className: VectorGroupByHashLongKeyDuplicateReductionOperator
 groupByMode: HASH
 keyExpressions: col 9:int
- native: false
+ native: true
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
 vectorProcessingMode: HASH
 projectedOutputColumnNums: []
 keys: ss_ticket_number (type: int)
@@ -494,7 +496,7 @@ STAGE PLANS:
 inputFormatFeatureSupport: []
 featureSupportInUse: []
 inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
- allNative: false
+ allNative: true
 usesVectorUDFAdaptor: false
 vectorized: true
 Reducer 2
@@ -763,6 +765,8 @@ STAGE PLANS:
 groupByMode: HASH
 keyExpressions: col 2:int
 native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: Single COUNT aggregation or Duplicate Reduction IS false
 vectorProcessingMode: HASH
 projectedOutputColumnNums: [0, 1, 2]
 keys: ss_item_sk (type: int)
@@ -829,6 +833,8 @@ STAGE PLANS:
 groupByMode: HASH
 keyExpressions: ConstantVectorExpression(val 1) -> 4:int
 native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: Single COUNT aggregation or Duplicate Reduction IS false
 vectorProcessingMode: HASH
 projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6]
 keys: 1 (type: int)
@@ -987,6 +993,8 @@ STAGE PLANS:
 groupByMode: HASH
 keyExpressions: col 9:int, col 2:int
 native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false
 vectorProcessingMode: HASH
 projectedOutputColumnNums: [0, 1, 2]
 keys: ss_ticket_number (type: int), ss_item_sk (type: int)
diff --git ql/src/test/results/clientpositive/llap/vector_groupby_rollup1.q.out ql/src/test/results/clientpositive/llap/vector_groupby_rollup1.q.out
index ef49d90..8f18045 100644
--- ql/src/test/results/clientpositive/llap/vector_groupby_rollup1.q.out
+++ ql/src/test/results/clientpositive/llap/vector_groupby_rollup1.q.out
@@ -72,6 +72,8 @@ STAGE PLANS:
 groupByMode: HASH
 keyExpressions: col 0:string, col 1:string, ConstantVectorExpression(val 0) -> 3:bigint
 native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true
+ nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false, No Grouping Sets IS false
 vectorProcessingMode: HASH
 projectedOutputColumnNums: [0]
 keys: key (type: string), val (type: string), 0L (type: bigint)
@@ -327,6 +329,8 @@ STAGE PLANS:
 groupByMode: HASH
 keyExpressions: col 0:string, col 1:string, ConstantVectorExpression(val 0) -> 3:bigint
 native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true
+ nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false, No Grouping Sets IS false
 vectorProcessingMode: HASH
 projectedOutputColumnNums: [0]
 keys: key (type: string), val (type: string), 0L (type: bigint)
@@ -673,6 +677,8 @@ STAGE PLANS:
 groupByMode: HASH
 keyExpressions: col 0:string, col 1:string, ConstantVectorExpression(val 0) -> 3:bigint
 native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true
+ nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false, No Grouping Sets IS false
 vectorProcessingMode: HASH
 projectedOutputColumnNums: [0]
 keys: key (type: string), val (type: string), 0L (type: bigint)
@@ -708,6 +714,8 @@ STAGE PLANS:
 groupByMode: HASH
 keyExpressions: col 0:string, col 1:string, ConstantVectorExpression(val 0) -> 5:bigint
 native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true
+ nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false, No Grouping Sets IS false
 vectorProcessingMode: HASH
 projectedOutputColumnNums: [0]
 keys: key (type: string), val (type: string), 0L (type: bigint)
diff --git ql/src/test/results/clientpositive/llap/vector_groupby_sort_11.q.out ql/src/test/results/clientpositive/llap/vector_groupby_sort_11.q.out
index 79ca6d9..54356b5 100644
--- ql/src/test/results/clientpositive/llap/vector_groupby_sort_11.q.out
+++ ql/src/test/results/clientpositive/llap/vector_groupby_sort_11.q.out
@@ -77,6 +77,8 @@ STAGE PLANS:
 className: VectorGroupByOperator
 groupByMode: HASH
 native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false
 vectorProcessingMode: HASH
 projectedOutputColumnNums: [0]
 mode: hash
@@ -494,10 +496,11 @@ STAGE PLANS:
 Statistics: Num rows: 10 Data size: 850 Basic stats: COMPLETE Column stats: COMPLETE
 Group By Operator
 Group By Vectorization:
- className: VectorGroupByOperator
+ className: VectorGroupByHashSerializeKeyDuplicateReductionOperator
 groupByMode: HASH
 keyExpressions: col 6:double
- native: false
+ native: true
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
 vectorProcessingMode: HASH
 projectedOutputColumnNums: []
 keys: _col0 (type: double)
@@ -524,7 +527,7 @@ STAGE PLANS:
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
 inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
- allNative: false
+ allNative: true
 usesVectorUDFAdaptor: false
 vectorized: true
 rowBatchContext:
@@ -569,6 +572,8 @@ STAGE PLANS:
 className: VectorGroupByOperator
 groupByMode: HASH
 native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false
 vectorProcessingMode: HASH
 projectedOutputColumnNums: [0]
 mode: hash
@@ -679,10 +684,11 @@ STAGE PLANS:
 Statistics: Num rows: 10 Data size: 150 Basic stats: COMPLETE Column stats: COMPLETE
 Group By Operator
 Group By Vectorization:
- className: VectorGroupByOperator
+ className: VectorGroupByHashLongKeyDuplicateReductionOperator
 groupByMode: HASH
 keyExpressions: ConstantVectorExpression(val 1) -> 4:int
- native: false
+ native: true
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
 vectorProcessingMode: HASH
 projectedOutputColumnNums: []
 keys: 1 (type: int)
@@ -709,7 +715,7 @@ STAGE PLANS:
 vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled]
 featureSupportInUse: []
 inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
- allNative: false
+ allNative: true
 usesVectorUDFAdaptor: false
 vectorized: true
 rowBatchContext:
@@ -760,6 +766,8 @@ STAGE PLANS:
 className: VectorGroupByOperator
 groupByMode: HASH
 native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false
 vectorProcessingMode: HASH
 projectedOutputColumnNums: [0]
 mode: hash
diff --git ql/src/test/results/clientpositive/llap/vector_groupby_sort_8.q.out ql/src/test/results/clientpositive/llap/vector_groupby_sort_8.q.out
index 6c6986e..a83eb4e 100644
--- ql/src/test/results/clientpositive/llap/vector_groupby_sort_8.q.out
+++ ql/src/test/results/clientpositive/llap/vector_groupby_sort_8.q.out
@@ -55,7 +55,7 @@ STAGE PLANS:
 Map Operator Tree:
 TableScan
 alias: t1
- Statistics: Num rows: 6 Data size: 1128 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 6 Data size: 510 Basic stats: COMPLETE Column stats: COMPLETE
 TableScan Vectorization:
 native: true
 vectorizationSchemaColumns: [0:key:string, 1:val:string, 2:ds:string, 3:ROW__ID:struct]
@@ -66,7 +66,7 @@ STAGE PLANS:
 className: VectorSelectOperator
 native: true
 projectedOutputColumnNums: [0]
- Statistics: Num rows: 6 Data size: 1128 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 6 Data size: 510 Basic stats: COMPLETE Column stats: COMPLETE
 Group By Operator
 Group By Vectorization:
 className: VectorGroupByOperator
@@ -78,7 +78,7 @@
 keys: key (type: string)
 mode: final
 outputColumnNames: _col0
- Statistics: Num rows: 3 Data size: 564 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 6 Data size: 510 Basic stats: COMPLETE Column stats: COMPLETE
 Group By Operator
 aggregations: count(_col0)
 Group By Vectorization:
@@ -86,11 +86,13 @@
 className: VectorGroupByOperator
 groupByMode: HASH
 native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false
 vectorProcessingMode: HASH
 projectedOutputColumnNums: [0]
 mode: hash
 outputColumnNames: _col0
- Statistics: Num rows: 1 Data size: 192 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
 Reduce Output Operator
 sort order:
 Reduce Sink Vectorization:
@@ -99,7 +101,7 @@
 native: true
 nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
 valueColumnNums: [0]
- Statistics: Num rows: 1 Data size: 192 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
 value expressions: _col0 (type: bigint)
 Execution mode: vectorized, llap
 LLAP IO: no inputs
@@ -147,13 +149,13 @@
 projectedOutputColumnNums: [0]
 mode: mergepartial
 outputColumnNames: _col0
- Statistics: Num rows: 1 Data size: 192 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
 File Output Operator
 compressed: false
 File Sink Vectorization:
 className: VectorFileSinkOperator
 native: false
- Statistics: Num rows: 1 Data size: 192 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
 table:
 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
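
The VectorGroupByHash*KeyDuplicateReductionOperator names appearing in these plans cover the case where a hash GROUP BY carries no aggregations, so the operator only needs to forward each distinct key once. Functionally that is hash-set deduplication; the toy model below illustrates the idea in scalar form (the class is hypothetical — the real operator works on vectorized column batches, not one key at a time):

    import java.util.HashSet;
    import java.util.Set;
    import java.util.function.LongConsumer;

    // Toy model of duplicate reduction: GROUP BY key with no aggregates
    // degenerates to "emit each distinct key once".
    final class LongKeyDuplicateReduction {
      private final Set<Long> seen = new HashSet<>();

      void process(long key, LongConsumer forward) {
        if (seen.add(key)) {   // first time this key is observed
          forward.accept(key); // forward it downstream exactly once
        }
      }
    }

Because no aggregation buffers are needed in this mode, whole vertices that previously mixed native and non-native operators can flip to allNative: true, which is exactly what the surrounding allNative deltas show.
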
diff --git ql/src/test/results/clientpositive/llap/vector_grouping_sets.q.out ql/src/test/results/clientpositive/llap/vector_grouping_sets.q.out
index ec3e2b8..f6262c9 100644
--- ql/src/test/results/clientpositive/llap/vector_grouping_sets.q.out
+++ ql/src/test/results/clientpositive/llap/vector_grouping_sets.q.out
@@ -170,6 +170,8 @@ STAGE PLANS:
 groupByMode: HASH
 keyExpressions: col 1:string, ConstantVectorExpression(val 0) -> 30:bigint
 native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true
+ nativeConditionsNotMet: Single Key Column IS false, No Grouping Sets IS false
 vectorProcessingMode: HASH
 projectedOutputColumnNums: []
 keys: s_store_id (type: string), 0L (type: bigint)
@@ -301,6 +303,8 @@ STAGE PLANS:
 groupByMode: HASH
 keyExpressions: col 1:string, ConstantVectorExpression(val 0) -> 30:bigint
 native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true
+ nativeConditionsNotMet: Single Key Column IS false, No Grouping Sets IS false
 vectorProcessingMode: HASH
 projectedOutputColumnNums: []
 keys: _col0 (type: string), 0L (type: bigint)
diff --git ql/src/test/results/clientpositive/llap/vector_inner_join.q.out ql/src/test/results/clientpositive/llap/vector_inner_join.q.out
index bb555df..2f990b7 100644
--- ql/src/test/results/clientpositive/llap/vector_inner_join.q.out
+++ ql/src/test/results/clientpositive/llap/vector_inner_join.q.out
@@ -313,10 +313,11 @@ STAGE PLANS:
 Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE
 Group By Operator
 Group By Vectorization:
- className: VectorGroupByOperator
+ className: VectorGroupByHashLongKeyDuplicateReductionOperator
 groupByMode: HASH
 keyExpressions: col 0:int
- native: false
+ native: true
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
 vectorProcessingMode: HASH
 projectedOutputColumnNums: []
 keys: _col0 (type: int)
@@ -342,7 +343,7 @@ STAGE PLANS:
 inputFormatFeatureSupport: []
 featureSupportInUse: []
 inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
- allNative: false
+ allNative: true
 usesVectorUDFAdaptor: false
 vectorized: true
 rowBatchContext:
diff --git ql/src/test/results/clientpositive/llap/vector_join30.q.out ql/src/test/results/clientpositive/llap/vector_join30.q.out
index 5fb8258..ccc7132 100644
--- ql/src/test/results/clientpositive/llap/vector_join30.q.out
+++ ql/src/test/results/clientpositive/llap/vector_join30.q.out
@@ -128,6 +128,8 @@ STAGE PLANS:
 className: VectorGroupByOperator
 groupByMode: HASH
 native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false
 vectorProcessingMode: HASH
 projectedOutputColumnNums: [0]
 mode: hash
@@ -278,6 +280,8 @@ STAGE PLANS:
 className: VectorGroupByOperator
 groupByMode: HASH
 native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false
 vectorProcessingMode: HASH
 projectedOutputColumnNums: [0]
 mode: hash
@@ -499,6 +503,8 @@ STAGE PLANS:
 className: VectorGroupByOperator
 groupByMode: HASH
 native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false
 vectorProcessingMode: HASH
 projectedOutputColumnNums: [0]
 mode: hash
@@ -710,6 +716,8 @@ STAGE PLANS:
 className: VectorGroupByOperator
 groupByMode: HASH
 native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false
 vectorProcessingMode: HASH
 projectedOutputColumnNums: [0]
 mode: hash
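
The other specialization visible in these plans, VectorGroupByHashSerializeKeySingleCountStarOperator with singleCountAggregation: COUNT_STAR, handles exactly one COUNT(*): the only per-key state is a row counter, which is why the generic VectorUDAFCountStar aggregator and its buffers drop out of the plan. A scalar sketch of that state follows (hypothetical class; the real operator processes VectorizedRowBatch and serializes multi-byte keys):

    import java.util.HashMap;
    import java.util.Map;

    // Scalar sketch of a single-COUNT(*) group by: all per-key state is one long.
    final class SingleCountStarGroupBy {
      private final Map<Object, Long> counts = new HashMap<>();

      void process(Object serializedKey) {
        // COUNT(*) counts every row for the key, including rows whose other columns are NULL.
        counts.merge(serializedKey, 1L, Long::sum);
      }

      Map<Object, Long> results() {
        return counts;
      }
    }
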
diff --git ql/src/test/results/clientpositive/llap/vector_leftsemi_mapjoin.q.out ql/src/test/results/clientpositive/llap/vector_leftsemi_mapjoin.q.out
index 17704e5..4f8f1e0 100644
--- ql/src/test/results/clientpositive/llap/vector_leftsemi_mapjoin.q.out
+++ ql/src/test/results/clientpositive/llap/vector_leftsemi_mapjoin.q.out
@@ -3389,9 +3389,10 @@ STAGE PLANS:
 className: VectorSelectOperator
 native: true
 Group By Vectorization:
- className: VectorGroupByOperator
+ className: VectorGroupByHashLongKeyDuplicateReductionOperator
 groupByMode: HASH
- native: false
+ native: true
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
 vectorProcessingMode: HASH
 Reduce Sink Vectorization:
 className: VectorReduceSinkLongOperator
@@ -3405,7 +3406,7 @@ STAGE PLANS:
 inputFormatFeatureSupport: []
 featureSupportInUse: []
 inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
- allNative: false
+ allNative: true
 usesVectorUDFAdaptor: false
 vectorized: true
 Reducer 2
@@ -3502,9 +3503,10 @@ STAGE PLANS:
 className: VectorSelectOperator
 native: true
 Group By Vectorization:
- className: VectorGroupByOperator
+ className: VectorGroupByHashLongKeyDuplicateReductionOperator
 groupByMode: HASH
- native: false
+ native: true
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
 vectorProcessingMode: HASH
 Reduce Sink Vectorization:
 className: VectorReduceSinkLongOperator
@@ -3518,7 +3520,7 @@ STAGE PLANS:
 inputFormatFeatureSupport: []
 featureSupportInUse: []
 inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
- allNative: false
+ allNative: true
 usesVectorUDFAdaptor: false
 vectorized: true
 Reducer 2
@@ -3617,9 +3619,10 @@ STAGE PLANS:
 className: VectorSelectOperator
 native: true
 Group By Vectorization:
- className: VectorGroupByOperator
+ className: VectorGroupByHashLongKeyDuplicateReductionOperator
 groupByMode: HASH
- native: false
+ native: true
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
 vectorProcessingMode: HASH
 Reduce Sink Vectorization:
 className: VectorReduceSinkLongOperator
@@ -3633,7 +3636,7 @@ STAGE PLANS:
 inputFormatFeatureSupport: []
 featureSupportInUse: []
 inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
- allNative: false
+ allNative: true
 usesVectorUDFAdaptor: false
 vectorized: true
 Reducer 2
@@ -3730,6 +3733,8 @@ STAGE PLANS:
 className: VectorGroupByOperator
 groupByMode: HASH
 native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: Single Key Column IS false
 vectorProcessingMode: HASH
 Reduce Sink Vectorization:
 className: VectorReduceSinkLongOperator
@@ -3848,6 +3853,8 @@ STAGE PLANS:
 className: VectorGroupByOperator
 groupByMode: HASH
 native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: Single Key Column IS false
 vectorProcessingMode: HASH
 Reduce Sink Vectorization:
 className: VectorReduceSinkLongOperator
@@ -3928,9 +3935,10 @@ STAGE PLANS:
 className: VectorSelectOperator
 native: true
 Group By Vectorization:
- className: VectorGroupByOperator
+ className: VectorGroupByHashLongKeyDuplicateReductionOperator
 groupByMode: HASH
- native: false
+ native: true
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
 vectorProcessingMode: HASH
 Reduce Sink Vectorization:
 className: VectorReduceSinkLongOperator
@@ -3944,7 +3952,7 @@ STAGE PLANS:
 inputFormatFeatureSupport: []
 featureSupportInUse: []
 inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
- allNative: false
+ allNative: true
 usesVectorUDFAdaptor: false
 vectorized: true
 Map 2
@@ -4044,6 +4052,8 @@ STAGE PLANS:
 className: VectorGroupByOperator
 groupByMode: HASH
 native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: Single Key Column IS false
 vectorProcessingMode: HASH
 Reduce Sink Vectorization:
 className: VectorReduceSinkLongOperator
@@ -4151,9 +4161,10 @@ STAGE PLANS:
 className: VectorSelectOperator
 native: true
 Group By Vectorization:
- className: VectorGroupByOperator
+ className: VectorGroupByHashLongKeyDuplicateReductionOperator
 groupByMode: HASH
- native: false
+ native: true
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
 vectorProcessingMode: HASH
 Reduce Sink Vectorization:
 className: VectorReduceSinkLongOperator
@@ -4167,7 +4178,7 @@ STAGE PLANS:
 inputFormatFeatureSupport: []
 featureSupportInUse: []
 inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
- allNative: false
+ allNative: true
 usesVectorUDFAdaptor: false
 vectorized: true
 Map 2
@@ -4290,9 +4301,10 @@ STAGE PLANS:
 className: VectorSelectOperator
 native: true
 Group By Vectorization:
- className: VectorGroupByOperator
+ className: VectorGroupByHashLongKeyDuplicateReductionOperator
 groupByMode: HASH
- native: false
+ native: true
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
 vectorProcessingMode: HASH
 Reduce Sink Vectorization:
 className: VectorReduceSinkLongOperator
@@ -4306,7 +4318,7 @@ STAGE PLANS:
 inputFormatFeatureSupport: []
 featureSupportInUse: []
 inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
- allNative: false
+ allNative: true
 usesVectorUDFAdaptor: false
 vectorized: true
 Reducer 2
@@ -4416,9 +4428,10 @@ STAGE PLANS:
 className: VectorSelectOperator
 native: true
 Group By Vectorization:
- className: VectorGroupByOperator
+ className: VectorGroupByHashLongKeyDuplicateReductionOperator
 groupByMode: HASH
- native: false
+ native: true
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
 vectorProcessingMode: HASH
 Reduce Sink Vectorization:
 className: VectorReduceSinkLongOperator
@@ -4432,7 +4445,7 @@ STAGE PLANS:
 inputFormatFeatureSupport: []
 featureSupportInUse: []
 inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
- allNative: false
+ allNative: true
 usesVectorUDFAdaptor: false
 vectorized: true
 Reducer 2
@@ -4552,9 +4565,10 @@ STAGE PLANS:
 className: VectorSelectOperator
 native: true
 Group By Vectorization:
- className: VectorGroupByOperator
+ className: VectorGroupByHashLongKeyDuplicateReductionOperator
 groupByMode: HASH
- native: false
+ native: true
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
 vectorProcessingMode: HASH
 Reduce Sink Vectorization:
 className: VectorReduceSinkLongOperator
@@ -4568,7 +4582,7 @@ STAGE PLANS:
 inputFormatFeatureSupport: []
 featureSupportInUse: []
 inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
- allNative: false
+ allNative: true
 usesVectorUDFAdaptor: false
 vectorized: true
 Reducer 2
@@ -4678,6 +4692,8 @@ STAGE PLANS:
 className: VectorGroupByOperator
 groupByMode: HASH
 native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: Single Key Column IS false
 vectorProcessingMode: HASH
 Reduce Sink Vectorization:
 className: VectorReduceSinkMultiKeyOperator
@@ -4796,9 +4812,10 @@ STAGE PLANS:
 className: VectorSelectOperator
 native: true
 Group By Vectorization:
- className: VectorGroupByOperator
+ className: VectorGroupByHashLongKeyDuplicateReductionOperator
 groupByMode: HASH
- native: false
+ native: true
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
 vectorProcessingMode: HASH
 Reduce Sink Vectorization:
 className: VectorReduceSinkLongOperator
@@ -4812,7 +4829,7 @@ STAGE PLANS:
 inputFormatFeatureSupport: []
 featureSupportInUse: []
 inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
- allNative: false
+ allNative: true
 usesVectorUDFAdaptor: false
 vectorized: true
 Map 4
@@ -4826,9 +4843,10 @@ STAGE PLANS:
 className: VectorSelectOperator
 native: true
 Group By Vectorization:
- className: VectorGroupByOperator
+ className: VectorGroupByHashLongKeyDuplicateReductionOperator
 groupByMode: HASH
- native: false
+ native: true
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
 vectorProcessingMode: HASH
 Reduce Sink Vectorization:
 className: VectorReduceSinkLongOperator
@@ -4842,7 +4860,7 @@ STAGE PLANS:
 inputFormatFeatureSupport: []
 featureSupportInUse: []
 inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
- allNative: false
+ allNative: true
 usesVectorUDFAdaptor: false
 vectorized: true
 Reducer 2
@@ -4962,9 +4980,10 @@ STAGE PLANS:
 className: VectorSelectOperator
 native: true
 Group By Vectorization:
- className: VectorGroupByOperator
+ className: VectorGroupByHashLongKeyDuplicateReductionOperator
 groupByMode: HASH
- native: false
+ native: true
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
 vectorProcessingMode: HASH
 Reduce Sink Vectorization:
 className: VectorReduceSinkLongOperator
@@ -4978,7 +4997,7 @@ STAGE PLANS:
 inputFormatFeatureSupport: []
 featureSupportInUse: []
 inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
- allNative: false
+ allNative: true
 usesVectorUDFAdaptor: false
 vectorized: true
 Reducer 2
@@ -5105,9 +5124,10 @@ STAGE PLANS:
 className: VectorSelectOperator
 native: true
 Group By Vectorization:
- className: VectorGroupByOperator
+ className: VectorGroupByHashLongKeyDuplicateReductionOperator
 groupByMode: HASH
- native: false
+ native: true
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
 vectorProcessingMode: HASH
 Reduce Sink Vectorization:
 className: VectorReduceSinkLongOperator
@@ -5121,7 +5141,7 @@ STAGE PLANS:
 inputFormatFeatureSupport: []
 featureSupportInUse: []
 inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
- allNative: false
+ allNative: true
 usesVectorUDFAdaptor: false
 vectorized: true
 Reducer 2
@@ -5233,9 +5253,10 @@ STAGE PLANS:
 className: VectorSelectOperator
 native: true
 Group By Vectorization:
- className: VectorGroupByOperator
+ className: VectorGroupByHashLongKeyDuplicateReductionOperator
 groupByMode: HASH
- native: false
+ native: true
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
 vectorProcessingMode: HASH
 Reduce Sink Vectorization:
 className: VectorReduceSinkLongOperator
@@ -5249,7 +5270,7 @@ STAGE PLANS:
 inputFormatFeatureSupport: []
 featureSupportInUse: []
 inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
- allNative: false
+ allNative: true
 usesVectorUDFAdaptor: false
 vectorized: true
 Map 5
@@ -5380,9 +5401,10 @@ STAGE PLANS:
 className: VectorSelectOperator
 native: true
 Group By Vectorization:
- className: VectorGroupByOperator
+ className: VectorGroupByHashLongKeyDuplicateReductionOperator
 groupByMode: HASH
- native: false
+ native: true
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
 vectorProcessingMode: HASH
 Reduce Sink Vectorization:
 className: VectorReduceSinkLongOperator
@@ -5396,7 +5418,7 @@ STAGE PLANS:
 inputFormatFeatureSupport: []
 featureSupportInUse: []
 inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
- allNative: false
+ allNative: true
 usesVectorUDFAdaptor: false
 vectorized: true
 Map 5
@@ -5529,9 +5551,10 @@ STAGE PLANS:
 className: VectorSelectOperator
 native: true
 Group By Vectorization:
- className: VectorGroupByOperator
+ className: VectorGroupByHashLongKeyDuplicateReductionOperator
 groupByMode: HASH
- native: false
+ native: true
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
 vectorProcessingMode: HASH
 Reduce Sink Vectorization:
 className: VectorReduceSinkLongOperator
@@ -5545,7 +5568,7 @@ STAGE PLANS:
 inputFormatFeatureSupport: []
 featureSupportInUse: []
 inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
- allNative: false
+ allNative: true
 usesVectorUDFAdaptor: false
 vectorized: true
 Map 5
@@ -5705,9 +5728,10 @@ STAGE PLANS:
 className: VectorSelectOperator
 native: true
 Group By Vectorization:
- className: VectorGroupByOperator
+ className: VectorGroupByHashLongKeyDuplicateReductionOperator
 groupByMode: HASH
- native: false
+ native: true
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
 vectorProcessingMode: HASH
 Reduce Sink Vectorization:
 className: VectorReduceSinkLongOperator
@@ -5721,7 +5745,7 @@ STAGE PLANS:
 inputFormatFeatureSupport: []
 featureSupportInUse: []
 inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
- allNative: false
+ allNative: true
 usesVectorUDFAdaptor: false
 vectorized: true
 Map 4
@@ -5869,9 +5893,10 @@ STAGE PLANS:
 className: VectorSelectOperator
 native: true
 Group By Vectorization:
- className: VectorGroupByOperator
+ className: VectorGroupByHashStringKeyDuplicateReductionOperator
 groupByMode: HASH
- native: false
+ native: true
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
 vectorProcessingMode: HASH
 Reduce Sink Vectorization:
 className: VectorReduceSinkStringOperator
@@ -5885,7 +5910,7 @@ STAGE PLANS:
 inputFormatFeatureSupport: []
 featureSupportInUse: []
 inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
- allNative: false
+ allNative: true
 usesVectorUDFAdaptor: false
 vectorized: true
@@ -6009,10 +6034,11 @@ STAGE PLANS:
 Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
 Group By Operator
 Group By Vectorization:
- className: VectorGroupByOperator
+ className: VectorGroupByHashLongKeyDuplicateReductionOperator
 groupByMode: HASH
 keyExpressions: col 0:int
- native: false
+ native: true
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
 vectorProcessingMode: HASH
 projectedOutputColumnNums: []
 keys: _col0 (type: int)
@@ -6038,7 +6064,7 @@ STAGE PLANS:
 inputFormatFeatureSupport: []
 featureSupportInUse: []
 inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
- allNative: false
+ allNative: true
 usesVectorUDFAdaptor: false
 vectorized: true
 rowBatchContext:
@@ -6211,10 +6237,11 @@ STAGE PLANS:
 Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
 Group By Operator
 Group By Vectorization:
- className: VectorGroupByOperator
+ className: VectorGroupByHashLongKeyDuplicateReductionOperator
 groupByMode: HASH
 keyExpressions: col 0:int
- native: false
+ native: true
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
 vectorProcessingMode: HASH
 projectedOutputColumnNums: []
 keys: _col0 (type: int)
@@ -6240,7 +6267,7 @@ STAGE PLANS:
 inputFormatFeatureSupport: []
 featureSupportInUse: []
 inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
- allNative: false
+ allNative: true
 usesVectorUDFAdaptor: false
 vectorized: true
 rowBatchContext:
@@ -6415,10 +6442,11 @@ STAGE PLANS:
 Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
 Group By Operator
 Group By Vectorization:
- className: VectorGroupByOperator
+ className: VectorGroupByHashLongKeyDuplicateReductionOperator
 groupByMode: HASH
 keyExpressions: col 0:int
- native: false
+ native: true
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
 vectorProcessingMode: HASH
 projectedOutputColumnNums: []
 keys: _col0 (type: int)
@@ -6444,7 +6472,7 @@ STAGE PLANS:
 inputFormatFeatureSupport: []
 featureSupportInUse: []
 inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
- allNative: false
+ allNative: true
 usesVectorUDFAdaptor: false
 vectorized: true
 rowBatchContext:
@@ -6623,6 +6651,8 @@ STAGE PLANS:
 groupByMode: HASH
 keyExpressions: col 0:int, col 0:int
 native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: Single Key Column IS false
 vectorProcessingMode: HASH
 projectedOutputColumnNums: []
 keys: _col1 (type: int), _col1 (type: int)
@@ -6830,6 +6860,8 @@ STAGE PLANS:
 groupByMode: HASH
 keyExpressions: col 0:int, col 1:string
 native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: Single Key Column IS false
 vectorProcessingMode: HASH
 projectedOutputColumnNums: []
 keys: _col0 (type: int), _col1 (type: string)
@@ -6966,10 +6998,11 @@ STAGE PLANS:
 Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
 Group By Operator
 Group By Vectorization:
- className: VectorGroupByOperator
+ className: VectorGroupByHashLongKeyDuplicateReductionOperator
 groupByMode: HASH
 keyExpressions: col 0:int
- native: false
+ native: true
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
 vectorProcessingMode: HASH
 projectedOutputColumnNums: []
 keys: _col0 (type: int)
@@ -6995,7 +7028,7 @@ STAGE PLANS:
 inputFormatFeatureSupport: []
 featureSupportInUse: []
 inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
- allNative: false
+ allNative: true
 usesVectorUDFAdaptor: false
 vectorized: true
 rowBatchContext:
@@ -7177,6 +7210,8 @@ STAGE PLANS:
 groupByMode: HASH
 keyExpressions: col 0:int, col 1:string
 native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: Single Key Column IS false
 vectorProcessingMode: HASH
 projectedOutputColumnNums: []
 keys: _col0 (type: int), _col1 (type: string)
@@ -7377,10 +7412,11 @@ STAGE PLANS:
 Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
 Group By Operator
 Group By Vectorization:
- className: VectorGroupByOperator
+ className: VectorGroupByHashLongKeyDuplicateReductionOperator
 groupByMode: HASH
 keyExpressions: col 0:int
- native: false
+ native: true
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
 vectorProcessingMode: HASH
 projectedOutputColumnNums: []
 keys: _col0 (type: int)
@@ -7406,7 +7442,7 @@ STAGE PLANS:
 inputFormatFeatureSupport: []
 featureSupportInUse: []
 inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
- allNative: false
+ allNative: true
 usesVectorUDFAdaptor: false
 vectorized: true
 rowBatchContext:
@@ -7637,10 +7673,11 @@ STAGE PLANS:
 Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
 Group By Operator
 Group By Vectorization:
- className: VectorGroupByOperator
+ className: VectorGroupByHashLongKeyDuplicateReductionOperator
 groupByMode: HASH
 keyExpressions: col 0:int
- native: false
+ native: true
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
 vectorProcessingMode: HASH
 projectedOutputColumnNums: []
 keys: _col0 (type: int)
@@ -7666,7 +7703,7 @@ STAGE PLANS:
 inputFormatFeatureSupport: []
 featureSupportInUse: []
 inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
- allNative: false
+ allNative: true
 usesVectorUDFAdaptor: false
 vectorized: true
 rowBatchContext:
@@ -7852,10 +7889,11 @@ STAGE PLANS:
 Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
 Group By Operator
 Group By Vectorization:
- className: VectorGroupByOperator
+ className: VectorGroupByHashLongKeyDuplicateReductionOperator
 groupByMode: HASH
 keyExpressions: col 0:int
- native: false
+ native: true
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
 vectorProcessingMode: HASH
 projectedOutputColumnNums: []
 keys: _col0 (type: int)
@@ -7882,7 +7920,7 @@ STAGE PLANS:
 inputFormatFeatureSupport: []
 featureSupportInUse: []
 inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
- allNative: false
+ allNative: true
 usesVectorUDFAdaptor: false
 vectorized: true
 rowBatchContext:
@@ -8109,10 +8147,11 @@ STAGE PLANS:
 Statistics: Num rows: 21 Data size: 84 Basic stats: COMPLETE Column stats: NONE
 Group By Operator
 Group By Vectorization:
- className: VectorGroupByOperator
+ className: VectorGroupByHashLongKeyDuplicateReductionOperator
 groupByMode: HASH
 keyExpressions: col 0:int
- native: false
+ native: true
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
 vectorProcessingMode: HASH
 projectedOutputColumnNums: []
 keys: _col0 (type: int)
@@ -8138,7 +8177,7 @@ STAGE PLANS:
 inputFormatFeatureSupport: []
 featureSupportInUse: []
 inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
- allNative: false
+ allNative: true
 usesVectorUDFAdaptor: false
 vectorized: true
 rowBatchContext:
@@ -8325,6 +8364,8 @@ STAGE PLANS:
 groupByMode: HASH
 keyExpressions: col 0:int, col 1:string
 native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: Single Key Column IS false
 vectorProcessingMode: HASH
 projectedOutputColumnNums: []
 keys: _col0 (type: int), _col1 (type: string)
@@ -8534,10 +8575,11 @@ STAGE PLANS:
 Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
 Group By Operator
 Group By Vectorization:
- className: VectorGroupByOperator
+ className: VectorGroupByHashLongKeyDuplicateReductionOperator
 groupByMode: HASH
 keyExpressions: col 0:int
- native: false
+ native: true
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
 vectorProcessingMode: HASH
 projectedOutputColumnNums: []
 keys: _col0 (type: int)
@@ -8563,7 +8605,7 @@ STAGE PLANS:
 inputFormatFeatureSupport: []
 featureSupportInUse: []
 inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
- allNative: false
+ allNative: true
 usesVectorUDFAdaptor: false
 vectorized: true
 rowBatchContext:
@@ -8597,10 +8639,11 @@ STAGE PLANS:
 Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
 Group By Operator
 Group By Vectorization:
- className: VectorGroupByOperator
+ className: VectorGroupByHashLongKeyDuplicateReductionOperator
 groupByMode: HASH
 keyExpressions: col 0:int
- native: false
+ native: true
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
 vectorProcessingMode: HASH
 projectedOutputColumnNums: []
 keys: _col0 (type: int)
@@ -8626,7 +8669,7 @@ STAGE PLANS:
 inputFormatFeatureSupport: []
 featureSupportInUse: []
 inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
- allNative: false
+ allNative: true
 usesVectorUDFAdaptor: false
 vectorized: true
 rowBatchContext:
@@ -8834,10 +8877,11 @@ STAGE PLANS:
 Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
 Group By Operator
 Group By Vectorization:
- className: VectorGroupByOperator
+ className: VectorGroupByHashLongKeyDuplicateReductionOperator
 groupByMode: HASH
 keyExpressions: col 0:int
- native: false
+ native: true
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
 vectorProcessingMode: HASH
 projectedOutputColumnNums: []
 keys: _col0 (type: int)
@@ -8863,7 +8907,7 @@ STAGE PLANS:
 inputFormatFeatureSupport: []
 featureSupportInUse: []
 inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
- allNative: false
+ allNative: true
 usesVectorUDFAdaptor: false
 vectorized: true
 rowBatchContext:
@@ -9064,10 +9108,11 @@ STAGE PLANS:
 Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
 Group By Operator
 Group By Vectorization:
- className: VectorGroupByOperator
+ className: VectorGroupByHashLongKeyDuplicateReductionOperator
 groupByMode: HASH
 keyExpressions: col 0:int
- native: false
+ native: true
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
 vectorProcessingMode: HASH
 projectedOutputColumnNums: []
 keys: _col0 (type: int)
@@ -9093,7 +9138,7 @@ STAGE PLANS:
 inputFormatFeatureSupport: []
 featureSupportInUse: []
 inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
- allNative: false
+ allNative: true
 usesVectorUDFAdaptor: false
 vectorized: true
 rowBatchContext:
@@ -9278,10 +9323,11 @@ STAGE PLANS:
 Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
 Group By Operator
 Group By Vectorization:
- className: VectorGroupByOperator
+ className: VectorGroupByHashLongKeyDuplicateReductionOperator
 groupByMode: HASH
 keyExpressions: col 0:int
- native: false
+ native: true
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
 vectorProcessingMode: HASH
 projectedOutputColumnNums: []
 keys: _col0 (type: int)
@@ -9307,7 +9353,7 @@ STAGE PLANS:
 inputFormatFeatureSupport: []
 featureSupportInUse: []
 inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
- allNative: false
+ allNative: true
 usesVectorUDFAdaptor: false
 vectorized: true
 rowBatchContext:
@@ -9528,10 +9574,11 @@ STAGE PLANS:
 Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
 Group By Operator
 Group By Vectorization:
- className: VectorGroupByOperator
+ className: VectorGroupByHashLongKeyDuplicateReductionOperator
 groupByMode: HASH
 keyExpressions: col 0:int
- native: false
+ native: true
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
 vectorProcessingMode: HASH
 projectedOutputColumnNums: []
 keys: _col0 (type: int)
@@ -9557,7 +9604,7 @@ STAGE PLANS:
 inputFormatFeatureSupport: []
 featureSupportInUse: []
 inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
- allNative: false
+ allNative: true
 usesVectorUDFAdaptor: false
 vectorized: true
 rowBatchContext:
@@ -9780,10 +9827,11 @@ STAGE PLANS:
 Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
 Group By Operator
 Group By Vectorization:
- className: VectorGroupByOperator
+ className: VectorGroupByHashLongKeyDuplicateReductionOperator
 groupByMode: HASH
 keyExpressions: col 0:int
- native: false
+ native: true
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
 vectorProcessingMode: HASH
 projectedOutputColumnNums: []
 keys: _col0 (type: int)
@@ -9809,7 +9857,7 @@ STAGE PLANS:
 inputFormatFeatureSupport: []
 featureSupportInUse: []
 inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
- allNative: false
+ allNative: true
 usesVectorUDFAdaptor: false
 vectorized: true
 rowBatchContext:
@@ -10090,10 +10138,11 @@ STAGE PLANS:
 Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
 Group By Operator
 Group By Vectorization:
- className: VectorGroupByOperator
+ className: VectorGroupByHashLongKeyDuplicateReductionOperator
 groupByMode: HASH
 keyExpressions: col 0:int
- native: false
+ native: true
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
 vectorProcessingMode: HASH
 projectedOutputColumnNums: []
 keys: _col0 (type: int)
@@ -10119,7 +10168,7 @@ STAGE PLANS:
 inputFormatFeatureSupport: []
 featureSupportInUse: []
 inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
- allNative: false
+ allNative: true
 usesVectorUDFAdaptor: false
 vectorized: true
 rowBatchContext:
@@ -10366,10 +10415,11 @@ STAGE PLANS:
 Statistics: Num rows: 11 Data size: 2024 Basic stats: COMPLETE Column stats: NONE
 Group By Operator
 Group By Vectorization:
- className: VectorGroupByOperator
+ className: VectorGroupByHashStringKeyDuplicateReductionOperator
 groupByMode: HASH
 keyExpressions: col 1:string
- native: false
+ native: true
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
 vectorProcessingMode: HASH
 projectedOutputColumnNums: []
 keys: _col0 (type: string)
@@ -10395,7 +10445,7 @@ STAGE PLANS:
 inputFormatFeatureSupport: []
 featureSupportInUse: []
 inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
- allNative: false
+ allNative: true
 usesVectorUDFAdaptor: false
 vectorized: true
 rowBatchContext:
@@ -10529,10 +10579,11 @@ STAGE PLANS:
 Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
 Group By Operator
 Group By Vectorization:
- className: VectorGroupByOperator
+ className: VectorGroupByHashLongKeyDuplicateReductionOperator
 groupByMode: HASH
 keyExpressions: col 0:int
- native: false
+ native: true
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
 vectorProcessingMode: HASH
 projectedOutputColumnNums: []
 keys: _col0 (type: int)
@@ -10558,7 +10609,7 @@ STAGE PLANS:
 inputFormatFeatureSupport: []
 featureSupportInUse: []
 inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
- allNative: false
+ allNative: true
 usesVectorUDFAdaptor: false
 vectorized: true
 rowBatchContext:
@@ -10732,10 +10783,11 @@ STAGE PLANS:
 Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
 Group By Operator
 Group By Vectorization:
- className: VectorGroupByOperator
+ className: VectorGroupByHashLongKeyDuplicateReductionOperator
 groupByMode: HASH
 keyExpressions: col 0:int
- native: false
+ native: true
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
 vectorProcessingMode: HASH
 projectedOutputColumnNums: []
 keys: _col0 (type: int)
@@ -10761,7 +10813,7 @@ STAGE PLANS:
 inputFormatFeatureSupport: []
 featureSupportInUse: []
 inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
- allNative: false
+ allNative: true
 usesVectorUDFAdaptor: false
 vectorized: true
 rowBatchContext:
@@ -10937,10 +10989,11 @@ STAGE PLANS:
 Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
 Group By Operator
 Group By Vectorization:
- className: VectorGroupByOperator
+ className: VectorGroupByHashLongKeyDuplicateReductionOperator
 groupByMode: HASH
 keyExpressions: col 0:int
- native: false
+ native: true
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
 vectorProcessingMode: HASH
 projectedOutputColumnNums: []
 keys: _col0 (type: int)
@@ -10966,7 +11019,7 @@ STAGE PLANS:
 inputFormatFeatureSupport: []
 featureSupportInUse: []
 inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
- allNative: false
+ allNative: true
 usesVectorUDFAdaptor: false
 vectorized: true
 rowBatchContext:
@@ -11146,6 +11199,8 @@ STAGE PLANS:
 groupByMode: HASH
 keyExpressions: col 0:int, col 0:int
 native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+
nativeConditionsNotMet: Single Key Column IS false vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: _col1 (type: int), _col1 (type: int) @@ -11354,6 +11409,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 0:int, col 1:string native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: _col0 (type: int), _col1 (type: string) @@ -11490,10 +11547,11 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: - className: VectorGroupByOperator + className: VectorGroupByHashLongKeyDuplicateReductionOperator groupByMode: HASH keyExpressions: col 0:int - native: false + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: _col0 (type: int) @@ -11519,7 +11577,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true rowBatchContext: @@ -11702,6 +11760,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 0:int, col 1:string native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: _col0 (type: int), _col1 (type: string) @@ -11903,10 +11963,11 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: - className: VectorGroupByOperator + className: VectorGroupByHashLongKeyDuplicateReductionOperator groupByMode: HASH keyExpressions: col 0:int - native: false + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: _col0 (type: int) @@ -11932,7 +11993,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true rowBatchContext: @@ -12165,10 +12226,11 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: - className: VectorGroupByOperator + className: VectorGroupByHashLongKeyDuplicateReductionOperator groupByMode: HASH keyExpressions: col 0:int - native: false + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT 
aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: _col0 (type: int) @@ -12194,7 +12256,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true rowBatchContext: @@ -12381,10 +12443,11 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: - className: VectorGroupByOperator + className: VectorGroupByHashLongKeyDuplicateReductionOperator groupByMode: HASH keyExpressions: col 0:int - native: false + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: _col0 (type: int) @@ -12411,7 +12474,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true rowBatchContext: @@ -12638,10 +12701,11 @@ STAGE PLANS: Statistics: Num rows: 21 Data size: 84 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: - className: VectorGroupByOperator + className: VectorGroupByHashLongKeyDuplicateReductionOperator groupByMode: HASH keyExpressions: col 0:int - native: false + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: _col0 (type: int) @@ -12667,7 +12731,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true rowBatchContext: @@ -12855,6 +12919,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 0:int, col 1:string native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: _col0 (type: int), _col1 (type: string) @@ -13064,10 +13130,11 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: - className: VectorGroupByOperator + className: VectorGroupByHashLongKeyDuplicateReductionOperator groupByMode: HASH keyExpressions: col 0:int - native: false + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: _col0 (type: int) @@ -13093,7 +13160,7 @@ STAGE PLANS: 
inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true rowBatchContext: @@ -13127,10 +13194,11 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: - className: VectorGroupByOperator + className: VectorGroupByHashLongKeyDuplicateReductionOperator groupByMode: HASH keyExpressions: col 0:int - native: false + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: _col0 (type: int) @@ -13156,7 +13224,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true rowBatchContext: @@ -13364,10 +13432,11 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: - className: VectorGroupByOperator + className: VectorGroupByHashLongKeyDuplicateReductionOperator groupByMode: HASH keyExpressions: col 0:int - native: false + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: _col0 (type: int) @@ -13393,7 +13462,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true rowBatchContext: @@ -13594,10 +13663,11 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: - className: VectorGroupByOperator + className: VectorGroupByHashLongKeyDuplicateReductionOperator groupByMode: HASH keyExpressions: col 0:int - native: false + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: _col0 (type: int) @@ -13623,7 +13693,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true rowBatchContext: @@ -13808,10 +13878,11 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: - className: VectorGroupByOperator + className: VectorGroupByHashLongKeyDuplicateReductionOperator groupByMode: HASH keyExpressions: col 0:int - native: false + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT 
aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: _col0 (type: int) @@ -13837,7 +13908,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true rowBatchContext: @@ -14058,10 +14129,11 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: - className: VectorGroupByOperator + className: VectorGroupByHashLongKeyDuplicateReductionOperator groupByMode: HASH keyExpressions: col 0:int - native: false + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: _col0 (type: int) @@ -14087,7 +14159,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true rowBatchContext: @@ -14310,10 +14382,11 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: - className: VectorGroupByOperator + className: VectorGroupByHashLongKeyDuplicateReductionOperator groupByMode: HASH keyExpressions: col 0:int - native: false + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: _col0 (type: int) @@ -14339,7 +14412,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true rowBatchContext: @@ -14622,10 +14695,11 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: - className: VectorGroupByOperator + className: VectorGroupByHashLongKeyDuplicateReductionOperator groupByMode: HASH keyExpressions: col 0:int - native: false + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: _col0 (type: int) @@ -14651,7 +14725,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true rowBatchContext: @@ -14899,10 +14973,11 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 2024 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: - className: VectorGroupByOperator + className: VectorGroupByHashStringKeyDuplicateReductionOperator groupByMode: HASH keyExpressions: 
col 1:string - native: false + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: _col0 (type: string) @@ -14928,7 +15003,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true rowBatchContext: @@ -15062,10 +15137,11 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: - className: VectorGroupByOperator + className: VectorGroupByHashLongKeyDuplicateReductionOperator groupByMode: HASH keyExpressions: col 0:int - native: false + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: _col0 (type: int) @@ -15091,7 +15167,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true rowBatchContext: @@ -15265,10 +15341,11 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: - className: VectorGroupByOperator + className: VectorGroupByHashLongKeyDuplicateReductionOperator groupByMode: HASH keyExpressions: col 0:int - native: false + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: _col0 (type: int) @@ -15294,7 +15371,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true rowBatchContext: @@ -15470,10 +15547,11 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: - className: VectorGroupByOperator + className: VectorGroupByHashLongKeyDuplicateReductionOperator groupByMode: HASH keyExpressions: col 0:int - native: false + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: _col0 (type: int) @@ -15499,7 +15577,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true rowBatchContext: @@ -15679,6 +15757,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 
0:int, col 0:int native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: _col1 (type: int), _col1 (type: int) @@ -15887,6 +15967,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 0:int, col 1:string native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: _col0 (type: int), _col1 (type: string) @@ -16023,10 +16105,11 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: - className: VectorGroupByOperator + className: VectorGroupByHashLongKeyDuplicateReductionOperator groupByMode: HASH keyExpressions: col 0:int - native: false + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: _col0 (type: int) @@ -16052,7 +16135,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true rowBatchContext: @@ -16235,6 +16318,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 0:int, col 1:string native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: _col0 (type: int), _col1 (type: string) @@ -16436,10 +16521,11 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: - className: VectorGroupByOperator + className: VectorGroupByHashLongKeyDuplicateReductionOperator groupByMode: HASH keyExpressions: col 0:int - native: false + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: _col0 (type: int) @@ -16465,7 +16551,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true rowBatchContext: @@ -16698,10 +16784,11 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: - className: VectorGroupByOperator + className: 
VectorGroupByHashLongKeyDuplicateReductionOperator groupByMode: HASH keyExpressions: col 0:int - native: false + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: _col0 (type: int) @@ -16727,7 +16814,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true rowBatchContext: @@ -16914,10 +17001,11 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: - className: VectorGroupByOperator + className: VectorGroupByHashLongKeyDuplicateReductionOperator groupByMode: HASH keyExpressions: col 0:int - native: false + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: _col0 (type: int) @@ -16944,7 +17032,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true rowBatchContext: @@ -17171,10 +17259,11 @@ STAGE PLANS: Statistics: Num rows: 21 Data size: 84 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: - className: VectorGroupByOperator + className: VectorGroupByHashLongKeyDuplicateReductionOperator groupByMode: HASH keyExpressions: col 0:int - native: false + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: _col0 (type: int) @@ -17200,7 +17289,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true rowBatchContext: @@ -17388,6 +17477,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 0:int, col 1:string native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: _col0 (type: int), _col1 (type: string) @@ -17597,10 +17688,11 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: - className: VectorGroupByOperator + className: VectorGroupByHashLongKeyDuplicateReductionOperator groupByMode: HASH keyExpressions: col 0:int - native: false + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, 
hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: _col0 (type: int) @@ -17626,7 +17718,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true rowBatchContext: @@ -17660,10 +17752,11 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: - className: VectorGroupByOperator + className: VectorGroupByHashLongKeyDuplicateReductionOperator groupByMode: HASH keyExpressions: col 0:int - native: false + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: _col0 (type: int) @@ -17689,7 +17782,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true rowBatchContext: @@ -17897,10 +17990,11 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: - className: VectorGroupByOperator + className: VectorGroupByHashLongKeyDuplicateReductionOperator groupByMode: HASH keyExpressions: col 0:int - native: false + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: _col0 (type: int) @@ -17926,7 +18020,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true rowBatchContext: @@ -18127,10 +18221,11 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: - className: VectorGroupByOperator + className: VectorGroupByHashLongKeyDuplicateReductionOperator groupByMode: HASH keyExpressions: col 0:int - native: false + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: _col0 (type: int) @@ -18156,7 +18251,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true rowBatchContext: @@ -18341,10 +18436,11 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: - className: VectorGroupByOperator + 
className: VectorGroupByHashLongKeyDuplicateReductionOperator groupByMode: HASH keyExpressions: col 0:int - native: false + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: _col0 (type: int) @@ -18370,7 +18466,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true rowBatchContext: @@ -18591,10 +18687,11 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: - className: VectorGroupByOperator + className: VectorGroupByHashLongKeyDuplicateReductionOperator groupByMode: HASH keyExpressions: col 0:int - native: false + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: _col0 (type: int) @@ -18620,7 +18717,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true rowBatchContext: @@ -18843,10 +18940,11 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: - className: VectorGroupByOperator + className: VectorGroupByHashLongKeyDuplicateReductionOperator groupByMode: HASH keyExpressions: col 0:int - native: false + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: _col0 (type: int) @@ -18872,7 +18970,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true rowBatchContext: @@ -19155,10 +19253,11 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: - className: VectorGroupByOperator + className: VectorGroupByHashLongKeyDuplicateReductionOperator groupByMode: HASH keyExpressions: col 0:int - native: false + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: _col0 (type: int) @@ -19184,7 +19283,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true 
rowBatchContext: @@ -19432,10 +19531,11 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 2024 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: - className: VectorGroupByOperator + className: VectorGroupByHashStringKeyDuplicateReductionOperator groupByMode: HASH keyExpressions: col 1:string - native: false + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: _col0 (type: string) @@ -19461,7 +19561,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true rowBatchContext: diff --git ql/src/test/results/clientpositive/llap/vector_mapjoin_reduce.q.out ql/src/test/results/clientpositive/llap/vector_mapjoin_reduce.q.out index 37821fb..77ffad6 100644 --- ql/src/test/results/clientpositive/llap/vector_mapjoin_reduce.q.out +++ ql/src/test/results/clientpositive/llap/vector_mapjoin_reduce.q.out @@ -42,10 +42,11 @@ STAGE PLANS: Statistics: Num rows: 100 Data size: 400 Basic stats: COMPLETE Column stats: COMPLETE Group By Operator Group By Vectorization: - className: VectorGroupByOperator + className: VectorGroupByHashLongKeyDuplicateReductionOperator groupByMode: HASH keyExpressions: col 1:int - native: false + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: l_partkey (type: int) @@ -70,7 +71,7 @@ STAGE PLANS: vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true Map 3 @@ -141,10 +142,11 @@ STAGE PLANS: Statistics: Num rows: 14 Data size: 56 Basic stats: COMPLETE Column stats: COMPLETE Group By Operator Group By Vectorization: - className: VectorGroupByOperator + className: VectorGroupByHashLongKeyDuplicateReductionOperator groupByMode: HASH keyExpressions: col 0:int - native: false + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: _col0 (type: int) @@ -169,7 +171,7 @@ STAGE PLANS: vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true Reducer 2 @@ -314,10 +316,11 @@ STAGE PLANS: Statistics: Num rows: 100 Data size: 400 Basic stats: COMPLETE Column stats: COMPLETE Group By Operator Group By Vectorization: - className: VectorGroupByOperator + className: VectorGroupByHashLongKeyDuplicateReductionOperator groupByMode: HASH keyExpressions: col 1:int - native: 
false + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: l_partkey (type: int) @@ -342,7 +345,7 @@ STAGE PLANS: vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true Map 3 @@ -419,6 +422,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 0:int, col 17:int native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: _col0 (type: int), _col1 (type: int) diff --git ql/src/test/results/clientpositive/llap/vector_number_compare_projection.q.out ql/src/test/results/clientpositive/llap/vector_number_compare_projection.q.out index 7a2cd54..a724aab 100644 --- ql/src/test/results/clientpositive/llap/vector_number_compare_projection.q.out +++ ql/src/test/results/clientpositive/llap/vector_number_compare_projection.q.out @@ -146,6 +146,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] mode: hash @@ -272,6 +274,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] mode: hash diff --git ql/src/test/results/clientpositive/llap/vector_orc_nested_column_pruning.q.out ql/src/test/results/clientpositive/llap/vector_orc_nested_column_pruning.q.out index 0b645ab..eec6639 100644 --- ql/src/test/results/clientpositive/llap/vector_orc_nested_column_pruning.q.out +++ ql/src/test/results/clientpositive/llap/vector_orc_nested_column_pruning.q.out @@ -1064,11 +1064,11 @@ STAGE PLANS: Group By Operator aggregations: count(_col1) Group By Vectorization: - aggregators: VectorUDAFCount(col 10:int) -> bigint - className: VectorGroupByOperator + className: VectorGroupByHashSerializeKeySingleCountColumnOperator groupByMode: HASH keyExpressions: col 9:double - native: false + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true vectorProcessingMode: HASH projectedOutputColumnNums: [0] keys: _col0 (type: double) @@ -1093,7 +1093,7 @@ STAGE PLANS: 
inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true Reducer 2 @@ -1754,6 +1754,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 9:int, col 12:boolean native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: _col0 (type: int), _col1 (type: boolean) @@ -2202,11 +2204,11 @@ STAGE PLANS: Group By Operator aggregations: count(_col1) Group By Vectorization: - aggregators: VectorUDAFCount(col 11:int) -> bigint - className: VectorGroupByOperator + className: VectorGroupByHashLongKeySingleCountColumnOperator groupByMode: HASH keyExpressions: col 10:int - native: false + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true vectorProcessingMode: HASH projectedOutputColumnNums: [0] keys: _col0 (type: int) @@ -2231,7 +2233,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true Reducer 2 @@ -2341,11 +2343,11 @@ STAGE PLANS: Group By Operator aggregations: count(_col1) Group By Vectorization: - aggregators: VectorUDAFCount(col 10:int) -> bigint - className: VectorGroupByOperator + className: VectorGroupByHashLongKeySingleCountColumnOperator groupByMode: HASH keyExpressions: col 9:int - native: false + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true vectorProcessingMode: HASH projectedOutputColumnNums: [0] keys: _col0 (type: int) @@ -2370,7 +2372,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true Reducer 2 @@ -2480,11 +2482,11 @@ STAGE PLANS: Group By Operator aggregations: count(_col1) Group By Vectorization: - aggregators: VectorUDAFCount(col 12:int) -> bigint - className: VectorGroupByOperator + className: VectorGroupByHashLongKeySingleCountColumnOperator groupByMode: HASH keyExpressions: col 11:int - native: false + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true vectorProcessingMode: HASH projectedOutputColumnNums: [0] keys: _col0 (type: int) @@ -2509,7 +2511,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true Reducer 2 @@ -2720,11 +2722,11 @@ STAGE PLANS: Group 
By Operator aggregations: count(_col1) Group By Vectorization: - aggregators: VectorUDAFCount(col 13:int) -> bigint - className: VectorGroupByOperator + className: VectorGroupByHashLongKeySingleCountColumnOperator groupByMode: HASH keyExpressions: col 12:int - native: false + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true vectorProcessingMode: HASH projectedOutputColumnNums: [0] keys: _col0 (type: int) @@ -2749,7 +2751,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true Reducer 2 diff --git ql/src/test/results/clientpositive/llap/vector_orderby_5.q.out ql/src/test/results/clientpositive/llap/vector_orderby_5.q.out index e4bc4f0..a60f308 100644 --- ql/src/test/results/clientpositive/llap/vector_orderby_5.q.out +++ ql/src/test/results/clientpositive/llap/vector_orderby_5.q.out @@ -147,6 +147,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 7:boolean native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] keys: bo (type: boolean) diff --git ql/src/test/results/clientpositive/llap/vector_outer_join1.q.out ql/src/test/results/clientpositive/llap/vector_outer_join1.q.out index 4901e83..87c1e97 100644 --- ql/src/test/results/clientpositive/llap/vector_outer_join1.q.out +++ ql/src/test/results/clientpositive/llap/vector_outer_join1.q.out @@ -745,6 +745,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1] mode: hash diff --git ql/src/test/results/clientpositive/llap/vector_outer_join2.q.out ql/src/test/results/clientpositive/llap/vector_outer_join2.q.out index a841d4c..0775e3d 100644 --- ql/src/test/results/clientpositive/llap/vector_outer_join2.q.out +++ ql/src/test/results/clientpositive/llap/vector_outer_join2.q.out @@ -320,6 +320,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1] mode: hash diff --git ql/src/test/results/clientpositive/llap/vector_outer_reference_windowed.q.out ql/src/test/results/clientpositive/llap/vector_outer_reference_windowed.q.out index b1209d9..7c3703e 100644 --- ql/src/test/results/clientpositive/llap/vector_outer_reference_windowed.q.out +++ 
ql/src/test/results/clientpositive/llap/vector_outer_reference_windowed.q.out @@ -287,6 +287,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] mode: hash @@ -506,6 +508,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 0:decimal(15,2), col 1:decimal(15,2) native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] keys: c1 (type: decimal(15,2)), c2 (type: decimal(15,2)) @@ -1587,6 +1591,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] mode: hash @@ -1806,6 +1812,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 0:decimal(7,2), col 1:decimal(7,2) native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] keys: c1 (type: decimal(7,2)), c2 (type: decimal(7,2)) diff --git ql/src/test/results/clientpositive/llap/vector_partition_diff_num_cols.q.out ql/src/test/results/clientpositive/llap/vector_partition_diff_num_cols.q.out index 068453f..9444cfe 100644 --- ql/src/test/results/clientpositive/llap/vector_partition_diff_num_cols.q.out +++ ql/src/test/results/clientpositive/llap/vector_partition_diff_num_cols.q.out @@ -116,6 +116,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] mode: hash @@ -286,6 +288,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] mode: hash @@ -456,6 +460,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine 
tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] mode: hash @@ -613,6 +619,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] mode: hash @@ -770,6 +778,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] mode: hash diff --git ql/src/test/results/clientpositive/llap/vector_partitioned_date_time.q.out ql/src/test/results/clientpositive/llap/vector_partitioned_date_time.q.out index 687b4af..4aaaf38 100644 --- ql/src/test/results/clientpositive/llap/vector_partitioned_date_time.q.out +++ ql/src/test/results/clientpositive/llap/vector_partitioned_date_time.q.out @@ -449,11 +449,11 @@ STAGE PLANS: Group By Operator aggregations: count() Group By Vectorization: - aggregators: VectorUDAFCountStar(*) -> bigint - className: VectorGroupByOperator + className: VectorGroupByHashLongKeySingleCountStarOperator groupByMode: HASH keyExpressions: col 2:date - native: false + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true vectorProcessingMode: HASH projectedOutputColumnNums: [0] keys: fl_date (type: date) @@ -478,7 +478,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true Reducer 2 @@ -1380,11 +1380,11 @@ STAGE PLANS: Group By Operator aggregations: count() Group By Vectorization: - aggregators: VectorUDAFCountStar(*) -> bigint - className: VectorGroupByOperator + className: VectorGroupByHashLongKeySingleCountStarOperator groupByMode: HASH keyExpressions: col 5:date - native: false + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true vectorProcessingMode: HASH projectedOutputColumnNums: [0] keys: fl_date (type: date) @@ -1409,7 +1409,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true Reducer 2 @@ -2335,11 +2335,11 @@ STAGE PLANS: Group By Operator aggregations: count() Group By Vectorization: - aggregators: VectorUDAFCountStar(*) -> bigint - className: 
VectorGroupByOperator + className: VectorGroupByHashSerializeKeySingleCountStarOperator groupByMode: HASH keyExpressions: col 5:timestamp - native: false + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true vectorProcessingMode: HASH projectedOutputColumnNums: [0] keys: fl_time (type: timestamp) @@ -2364,7 +2364,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true Reducer 2 @@ -2874,11 +2874,11 @@ STAGE PLANS: Group By Operator aggregations: count() Group By Vectorization: - aggregators: VectorUDAFCountStar(*) -> bigint - className: VectorGroupByOperator + className: VectorGroupByHashLongKeySingleCountStarOperator groupByMode: HASH keyExpressions: col 2:date - native: false + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true vectorProcessingMode: HASH projectedOutputColumnNums: [0] keys: fl_date (type: date) @@ -2903,7 +2903,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true Reducer 2 @@ -3805,11 +3805,11 @@ STAGE PLANS: Group By Operator aggregations: count() Group By Vectorization: - aggregators: VectorUDAFCountStar(*) -> bigint - className: VectorGroupByOperator + className: VectorGroupByHashLongKeySingleCountStarOperator groupByMode: HASH keyExpressions: col 5:date - native: false + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true vectorProcessingMode: HASH projectedOutputColumnNums: [0] keys: fl_date (type: date) @@ -3834,7 +3834,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true Reducer 2 @@ -4760,11 +4760,11 @@ STAGE PLANS: Group By Operator aggregations: count() Group By Vectorization: - aggregators: VectorUDAFCountStar(*) -> bigint - className: VectorGroupByOperator + className: VectorGroupByHashSerializeKeySingleCountStarOperator groupByMode: HASH keyExpressions: col 5:timestamp - native: false + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true vectorProcessingMode: HASH projectedOutputColumnNums: [0] keys: fl_time (type: timestamp) @@ -4789,7 +4789,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat - allNative: false + allNative: true 
usesVectorUDFAdaptor: false vectorized: true Reducer 2 diff --git ql/src/test/results/clientpositive/llap/vector_ptf_1.q.out ql/src/test/results/clientpositive/llap/vector_ptf_1.q.out index 568549d..b4b0241 100644 --- ql/src/test/results/clientpositive/llap/vector_ptf_1.q.out +++ ql/src/test/results/clientpositive/llap/vector_ptf_1.q.out @@ -86,6 +86,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 0:string, col 1:int native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1] keys: name (type: string), age (type: int) diff --git ql/src/test/results/clientpositive/llap/vector_reduce_groupby_decimal.q.out ql/src/test/results/clientpositive/llap/vector_reduce_groupby_decimal.q.out index 3f92327..b94dfa9 100644 --- ql/src/test/results/clientpositive/llap/vector_reduce_groupby_decimal.q.out +++ ql/src/test/results/clientpositive/llap/vector_reduce_groupby_decimal.q.out @@ -67,6 +67,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 0:int, col 1:double, col 2:decimal(20,10), col 3:decimal(23,14) native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] keys: cint (type: int), cdouble (type: double), cdecimal1 (type: decimal(20,10)), cdecimal2 (type: decimal(23,14)) diff --git ql/src/test/results/clientpositive/llap/vector_reduce_groupby_duplicate_cols.q.out ql/src/test/results/clientpositive/llap/vector_reduce_groupby_duplicate_cols.q.out index bd42ed2..bb5f2cf 100644 --- ql/src/test/results/clientpositive/llap/vector_reduce_groupby_duplicate_cols.q.out +++ ql/src/test/results/clientpositive/llap/vector_reduce_groupby_duplicate_cols.q.out @@ -104,6 +104,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 0:int, col 1:int native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: _col0 (type: int), _col1 (type: int) diff --git ql/src/test/results/clientpositive/llap/vector_reuse_scratchcols.q.out ql/src/test/results/clientpositive/llap/vector_reuse_scratchcols.q.out index 8fb0752..98208e4 100644 --- ql/src/test/results/clientpositive/llap/vector_reuse_scratchcols.q.out +++ ql/src/test/results/clientpositive/llap/vector_reuse_scratchcols.q.out @@ -119,6 +119,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10] mode: hash @@ -326,6 +328,8 @@ STAGE PLANS: 
className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10] mode: hash diff --git ql/src/test/results/clientpositive/llap/vector_string_concat.q.out ql/src/test/results/clientpositive/llap/vector_string_concat.q.out index 5b43765..5a745e0 100644 --- ql/src/test/results/clientpositive/llap/vector_string_concat.q.out +++ ql/src/test/results/clientpositive/llap/vector_string_concat.q.out @@ -354,10 +354,11 @@ STAGE PLANS: Statistics: Num rows: 2000 Data size: 106456 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: - className: VectorGroupByOperator + className: VectorGroupByHashStringKeyDuplicateReductionOperator groupByMode: HASH keyExpressions: col 20:string - native: false + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: _col0 (type: string) @@ -382,7 +383,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true Reducer 2 diff --git ql/src/test/results/clientpositive/llap/vector_udf1.q.out ql/src/test/results/clientpositive/llap/vector_udf1.q.out index 9859824..7a0c3d7 100644 --- ql/src/test/results/clientpositive/llap/vector_udf1.q.out +++ ql/src/test/results/clientpositive/llap/vector_udf1.q.out @@ -2791,6 +2791,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1] mode: hash @@ -2934,6 +2936,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1] mode: hash diff --git ql/src/test/results/clientpositive/llap/vector_when_case_null.q.out ql/src/test/results/clientpositive/llap/vector_when_case_null.q.out index de30ca7..7d97aa6 100644 --- ql/src/test/results/clientpositive/llap/vector_when_case_null.q.out +++ ql/src/test/results/clientpositive/llap/vector_when_case_null.q.out @@ -57,11 +57,11 @@ STAGE PLANS: Group By Operator aggregations: count(_col1) Group By Vectorization: - aggregators: VectorUDAFCount(col 7:int) -> bigint - className: VectorGroupByOperator + className: VectorGroupByHashStringKeySingleCountColumnOperator groupByMode: HASH keyExpressions: col 
0:string - native: false + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true vectorProcessingMode: HASH projectedOutputColumnNums: [0] keys: _col0 (type: string) @@ -86,7 +86,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true Reducer 2 diff --git ql/src/test/results/clientpositive/llap/vector_windowing.q.out ql/src/test/results/clientpositive/llap/vector_windowing.q.out index c713303..5222385 100644 --- ql/src/test/results/clientpositive/llap/vector_windowing.q.out +++ ql/src/test/results/clientpositive/llap/vector_windowing.q.out @@ -234,6 +234,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 2:string, col 1:string, col 5:int native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] keys: p_mfgr (type: string), p_name (type: string), p_size (type: int) @@ -442,6 +444,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 2:string, col 1:string, col 5:int native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] keys: p_mfgr (type: string), p_name (type: string), p_size (type: int) @@ -3811,6 +3815,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 1:string, col 2:string, col 5:int, col 7:double native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1] keys: p_name (type: string), p_mfgr (type: string), p_size (type: int), p_retailprice (type: double) @@ -4501,6 +4507,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 2:string, col 3:string native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] keys: p_mfgr (type: string), p_brand (type: string) @@ -6055,6 +6063,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 2:string, col 1:string, col 5:int native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false 
vectorProcessingMode: HASH projectedOutputColumnNums: [0] keys: p_mfgr (type: string), p_name (type: string), p_size (type: int) diff --git ql/src/test/results/clientpositive/llap/vector_windowing_gby2.q.out ql/src/test/results/clientpositive/llap/vector_windowing_gby2.q.out index 8dcb900..c58fbba 100644 --- ql/src/test/results/clientpositive/llap/vector_windowing_gby2.q.out +++ ql/src/test/results/clientpositive/llap/vector_windowing_gby2.q.out @@ -50,6 +50,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 0:string native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] keys: key (type: string) @@ -293,6 +295,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 6:int native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1] keys: _col0 (type: int) @@ -530,6 +534,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 0:string, col 1:string native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3] keys: _col0 (type: string), _col1 (type: string) diff --git ql/src/test/results/clientpositive/llap/vectorization_0.q.out ql/src/test/results/clientpositive/llap/vectorization_0.q.out index b2db5a5..cfb45f1 100644 --- ql/src/test/results/clientpositive/llap/vectorization_0.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_0.q.out @@ -54,6 +54,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3] mode: hash @@ -235,6 +237,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] mode: hash @@ -566,6 +570,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false 
vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3] mode: hash @@ -747,6 +753,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] mode: hash @@ -1078,6 +1086,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3] mode: hash @@ -1259,6 +1269,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] mode: hash @@ -1636,6 +1648,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6] mode: hash diff --git ql/src/test/results/clientpositive/llap/vectorization_1.q.out ql/src/test/results/clientpositive/llap/vectorization_1.q.out index c87926c..2459461 100644 --- ql/src/test/results/clientpositive/llap/vectorization_1.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_1.q.out @@ -87,6 +87,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] mode: hash diff --git ql/src/test/results/clientpositive/llap/vectorization_12.q.out ql/src/test/results/clientpositive/llap/vectorization_12.q.out index 0ead6c4..278a146 100644 --- ql/src/test/results/clientpositive/llap/vectorization_12.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_12.q.out @@ -111,6 +111,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 5:double, col 3:bigint, col 6:string, col 10:boolean native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3, 4, 
5, 6] keys: _col3 (type: double), _col0 (type: bigint), _col2 (type: string), _col1 (type: boolean) diff --git ql/src/test/results/clientpositive/llap/vectorization_13.q.out ql/src/test/results/clientpositive/llap/vectorization_13.q.out index d72c298..23ffd42 100644 --- ql/src/test/results/clientpositive/llap/vectorization_13.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_13.q.out @@ -113,6 +113,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 10:boolean, col 0:tinyint, col 8:timestamp, col 4:float, col 6:string native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] keys: _col0 (type: boolean), _col1 (type: tinyint), _col2 (type: timestamp), _col3 (type: float), _col4 (type: string) @@ -467,6 +469,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 10:boolean, col 0:tinyint, col 8:timestamp, col 4:float, col 6:string native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] keys: _col0 (type: boolean), _col1 (type: tinyint), _col2 (type: timestamp), _col3 (type: float), _col4 (type: string) diff --git ql/src/test/results/clientpositive/llap/vectorization_14.q.out ql/src/test/results/clientpositive/llap/vectorization_14.q.out index 7ae99a3..2899f14 100644 --- ql/src/test/results/clientpositive/llap/vectorization_14.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_14.q.out @@ -113,6 +113,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 6:string, col 4:float, col 5:double, col 8:timestamp, col 10:boolean native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6] keys: _col2 (type: string), _col1 (type: float), _col4 (type: double), _col0 (type: timestamp), _col3 (type: boolean) diff --git ql/src/test/results/clientpositive/llap/vectorization_15.q.out ql/src/test/results/clientpositive/llap/vectorization_15.q.out index 31363df..150d76c 100644 --- ql/src/test/results/clientpositive/llap/vectorization_15.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_15.q.out @@ -109,6 +109,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 4:float, col 10:boolean, col 5:double, col 6:string, col 0:tinyint, col 2:int, col 8:timestamp native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] keys: _col0 (type: float), _col1 
(type: boolean), _col2 (type: double), _col3 (type: string), _col4 (type: tinyint), _col5 (type: int), _col6 (type: timestamp) diff --git ql/src/test/results/clientpositive/llap/vectorization_16.q.out ql/src/test/results/clientpositive/llap/vectorization_16.q.out index 59f2d10..d1e3ee8 100644 --- ql/src/test/results/clientpositive/llap/vectorization_16.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_16.q.out @@ -86,6 +86,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 6:string, col 5:double, col 8:timestamp native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3] keys: _col0 (type: string), _col1 (type: double), _col2 (type: timestamp) diff --git ql/src/test/results/clientpositive/llap/vectorization_2.q.out ql/src/test/results/clientpositive/llap/vectorization_2.q.out index 83833da..45a4452 100644 --- ql/src/test/results/clientpositive/llap/vectorization_2.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_2.q.out @@ -91,6 +91,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] mode: hash diff --git ql/src/test/results/clientpositive/llap/vectorization_3.q.out ql/src/test/results/clientpositive/llap/vectorization_3.q.out index 3c502cd..eaf26f3 100644 --- ql/src/test/results/clientpositive/llap/vectorization_3.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_3.q.out @@ -96,6 +96,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13] mode: hash diff --git ql/src/test/results/clientpositive/llap/vectorization_4.q.out ql/src/test/results/clientpositive/llap/vectorization_4.q.out index a8cfa48..5ba9b03 100644 --- ql/src/test/results/clientpositive/llap/vectorization_4.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_4.q.out @@ -91,6 +91,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3, 4] mode: hash diff --git ql/src/test/results/clientpositive/llap/vectorization_5.q.out ql/src/test/results/clientpositive/llap/vectorization_5.q.out index 5124740..a3ea0ad 100644 --- 
ql/src/test/results/clientpositive/llap/vectorization_5.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_5.q.out @@ -84,6 +84,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3, 4] mode: hash diff --git ql/src/test/results/clientpositive/llap/vectorization_9.q.out ql/src/test/results/clientpositive/llap/vectorization_9.q.out index 59f2d10..d1e3ee8 100644 --- ql/src/test/results/clientpositive/llap/vectorization_9.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_9.q.out @@ -86,6 +86,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 6:string, col 5:double, col 8:timestamp native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3] keys: _col0 (type: string), _col1 (type: double), _col2 (type: timestamp) diff --git ql/src/test/results/clientpositive/llap/vectorization_limit.q.out ql/src/test/results/clientpositive/llap/vectorization_limit.q.out index 7be4d7d..7018996 100644 --- ql/src/test/results/clientpositive/llap/vectorization_limit.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_limit.q.out @@ -291,6 +291,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 0:tinyint native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1] keys: _col0 (type: tinyint) @@ -491,10 +493,11 @@ STAGE PLANS: Statistics: Num rows: 12288 Data size: 36696 Basic stats: COMPLETE Column stats: COMPLETE Group By Operator Group By Vectorization: - className: VectorGroupByOperator + className: VectorGroupByHashLongKeyDuplicateReductionOperator groupByMode: HASH keyExpressions: col 0:tinyint - native: false + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: ctinyint (type: tinyint) @@ -522,7 +525,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true rowBatchContext: @@ -655,6 +658,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 0:tinyint, col 5:double native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true 
+ nativeConditionsNotMet: Single Key Column IS false vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: ctinyint (type: tinyint), cdouble (type: double) @@ -892,6 +897,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 5:double native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] keys: cdouble (type: double) diff --git ql/src/test/results/clientpositive/llap/vectorization_nested_udf.q.out ql/src/test/results/clientpositive/llap/vectorization_nested_udf.q.out index e6427fa..4e34429 100644 --- ql/src/test/results/clientpositive/llap/vectorization_nested_udf.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_nested_udf.q.out @@ -44,6 +44,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] mode: hash diff --git ql/src/test/results/clientpositive/llap/vectorization_part_project.q.out ql/src/test/results/clientpositive/llap/vectorization_part_project.q.out index 80c7c0c..ec2f54f 100644 --- ql/src/test/results/clientpositive/llap/vectorization_part_project.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_part_project.q.out @@ -70,15 +70,15 @@ STAGE PLANS: Map Operator Tree: TableScan alias: alltypesorc_part - Statistics: Num rows: 200 Data size: 1592 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 200 Data size: 1600 Basic stats: COMPLETE Column stats: PARTIAL Select Operator expressions: (cdouble + 2.0D) (type: double) outputColumnNames: _col0 - Statistics: Num rows: 200 Data size: 1600 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 200 Data size: 1600 Basic stats: COMPLETE Column stats: PARTIAL Reduce Output Operator key expressions: _col0 (type: double) sort order: + - Statistics: Num rows: 200 Data size: 1600 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 200 Data size: 1600 Basic stats: COMPLETE Column stats: PARTIAL TopN Hash Memory Usage: 0.1 Execution mode: vectorized, llap LLAP IO: all inputs @@ -103,13 +103,13 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: double) outputColumnNames: _col0 - Statistics: Num rows: 200 Data size: 1600 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 200 Data size: 1600 Basic stats: COMPLETE Column stats: PARTIAL Limit Number of rows: 10 - Statistics: Num rows: 10 Data size: 80 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 10 Data size: 80 Basic stats: COMPLETE Column stats: PARTIAL File Output Operator compressed: false - Statistics: Num rows: 10 Data size: 80 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 10 Data size: 80 Basic stats: COMPLETE Column stats: PARTIAL table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git 
ql/src/test/results/clientpositive/llap/vectorization_short_regress.q.out ql/src/test/results/clientpositive/llap/vectorization_short_regress.q.out index 3a5c272..cd13d41 100644 --- ql/src/test/results/clientpositive/llap/vectorization_short_regress.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_short_regress.q.out @@ -118,6 +118,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10] mode: hash @@ -380,6 +382,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14] mode: hash @@ -634,6 +638,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13] mode: hash @@ -867,6 +873,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] mode: hash @@ -2202,6 +2210,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 1:smallint native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7] keys: _col0 (type: smallint) @@ -2479,6 +2489,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 5:double native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3, 4] keys: _col0 (type: double) @@ -2800,6 +2812,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 8:timestamp, col 6:string native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, 
spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17] keys: _col0 (type: timestamp), _col1 (type: string) @@ -3202,6 +3216,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 10:boolean native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17] keys: _col0 (type: boolean) @@ -3440,6 +3456,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] mode: hash @@ -3555,6 +3573,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] mode: hash @@ -3742,6 +3762,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] mode: hash @@ -3857,6 +3879,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] mode: hash @@ -3972,6 +3996,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] mode: hash @@ -4087,6 +4113,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, 
Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] mode: hash @@ -4202,6 +4230,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] mode: hash @@ -4317,6 +4347,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] mode: hash diff --git ql/src/test/results/clientpositive/llap/vectorized_case.q.out ql/src/test/results/clientpositive/llap/vectorized_case.q.out index aec161d..93fd3b6 100644 --- ql/src/test/results/clientpositive/llap/vectorized_case.q.out +++ ql/src/test/results/clientpositive/llap/vectorized_case.q.out @@ -306,6 +306,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1] mode: hash @@ -450,6 +452,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1] mode: hash diff --git ql/src/test/results/clientpositive/llap/vectorized_date_funcs.q.out ql/src/test/results/clientpositive/llap/vectorized_date_funcs.q.out index 8351192..d7dbb25 100644 --- ql/src/test/results/clientpositive/llap/vectorized_date_funcs.q.out +++ ql/src/test/results/clientpositive/llap/vectorized_date_funcs.q.out @@ -1262,6 +1262,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3] mode: hash diff --git ql/src/test/results/clientpositive/llap/vectorized_distinct_gby.q.out ql/src/test/results/clientpositive/llap/vectorized_distinct_gby.q.out index f19e2ca..2f1ca8c 100644 --- ql/src/test/results/clientpositive/llap/vectorized_distinct_gby.q.out +++ ql/src/test/results/clientpositive/llap/vectorized_distinct_gby.q.out @@ -73,6 +73,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: 
hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1] mode: hash @@ -200,10 +202,11 @@ STAGE PLANS: Statistics: Num rows: 12288 Data size: 36696 Basic stats: COMPLETE Column stats: COMPLETE Group By Operator Group By Vectorization: - className: VectorGroupByOperator + className: VectorGroupByHashLongKeyDuplicateReductionOperator groupByMode: HASH keyExpressions: col 2:int - native: false + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: cint (type: int) @@ -229,7 +232,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true rowBatchContext: @@ -282,6 +285,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3] mode: hash diff --git ql/src/test/results/clientpositive/llap/vectorized_dynamic_partition_pruning.q.out ql/src/test/results/clientpositive/llap/vectorized_dynamic_partition_pruning.q.out index 15b62c9..9b26a8b 100644 --- ql/src/test/results/clientpositive/llap/vectorized_dynamic_partition_pruning.q.out +++ ql/src/test/results/clientpositive/llap/vectorized_dynamic_partition_pruning.q.out @@ -85,7 +85,7 @@ STAGE PLANS: vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true Reducer 2 @@ -308,7 +308,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true Reducer 2 @@ -625,7 +625,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true Map 6 @@ -669,7 +669,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true Reducer 2 @@ -1068,7 +1068,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true Reducer 2 @@ -1380,7 +1380,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: 
org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true Reducer 2 @@ -1690,7 +1690,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true Reducer 2 @@ -1848,7 +1848,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true Reducer 2 @@ -2305,7 +2305,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true Reducer 2 @@ -2440,7 +2440,7 @@ STAGE PLANS: vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true Reducer 2 @@ -2785,7 +2785,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true Reducer 2 @@ -2943,7 +2943,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true Reducer 2 @@ -3055,7 +3055,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true Map 4 @@ -3219,7 +3219,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true Reducer 2 @@ -3361,7 +3361,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true Map 6 @@ -3405,7 +3405,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true Reducer 2 @@ -4250,7 +4250,7 @@ STAGE PLANS: vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true Map 10 @@ -4308,7 +4308,7 @@ STAGE PLANS: vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true Map 7 @@ -4644,7 +4644,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true Reducer 2 @@ -4820,7 +4820,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] 
featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true Map 4 @@ -4864,7 +4864,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true Reducer 2 @@ -5045,7 +5045,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true Reducer 2 @@ -5208,7 +5208,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true Reducer 2 @@ -5360,7 +5360,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true Reducer 2 @@ -5512,7 +5512,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true Reducer 2 @@ -5628,7 +5628,7 @@ STAGE PLANS: vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true Reducer 2 @@ -5826,7 +5826,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true Reducer 2 @@ -6204,7 +6204,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true Map 4 @@ -6248,7 +6248,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true Reducer 2 @@ -6536,7 +6536,7 @@ STAGE PLANS: vectorizationSupportRemovedReasons: [DECIMAL_64 disabled because LLAP is enabled] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true Map 3 diff --git ql/src/test/results/clientpositive/llap/vectorized_mapjoin.q.out ql/src/test/results/clientpositive/llap/vectorized_mapjoin.q.out index 8abd234..0815ef6 100644 --- ql/src/test/results/clientpositive/llap/vectorized_mapjoin.q.out +++ ql/src/test/results/clientpositive/llap/vectorized_mapjoin.q.out @@ -75,6 +75,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3, 4] mode: hash diff --git 
ql/src/test/results/clientpositive/llap/vectorized_mapjoin3.q.out ql/src/test/results/clientpositive/llap/vectorized_mapjoin3.q.out index f05e5c0..c1ab83a 100644 --- ql/src/test/results/clientpositive/llap/vectorized_mapjoin3.q.out +++ ql/src/test/results/clientpositive/llap/vectorized_mapjoin3.q.out @@ -206,6 +206,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] mode: hash @@ -409,6 +411,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] mode: hash @@ -612,6 +616,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] mode: hash diff --git ql/src/test/results/clientpositive/llap/vectorized_parquet_types.q.out ql/src/test/results/clientpositive/llap/vectorized_parquet_types.q.out index acb9126..ad11a9b 100644 --- ql/src/test/results/clientpositive/llap/vectorized_parquet_types.q.out +++ ql/src/test/results/clientpositive/llap/vectorized_parquet_types.q.out @@ -298,6 +298,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 1:tinyint native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Single Key Column IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8] keys: _col0 (type: tinyint) diff --git ql/src/test/results/clientpositive/llap/vectorized_ptf.q.out ql/src/test/results/clientpositive/llap/vectorized_ptf.q.out index ccf9aae..5a0d8e5 100644 --- ql/src/test/results/clientpositive/llap/vectorized_ptf.q.out +++ ql/src/test/results/clientpositive/llap/vectorized_ptf.q.out @@ -3542,6 +3542,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 2:string, col 3:string native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] keys: p_mfgr (type: string), p_brand (type: string) diff --git ql/src/test/results/clientpositive/llap/vectorized_timestamp.q.out ql/src/test/results/clientpositive/llap/vectorized_timestamp.q.out index 46a2470..c8df117 100644 --- 
ql/src/test/results/clientpositive/llap/vectorized_timestamp.q.out +++ ql/src/test/results/clientpositive/llap/vectorized_timestamp.q.out @@ -146,6 +146,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1] mode: hash @@ -371,6 +373,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1] mode: hash @@ -512,6 +516,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2] mode: hash diff --git ql/src/test/results/clientpositive/llap/vectorized_timestamp_funcs.q.out ql/src/test/results/clientpositive/llap/vectorized_timestamp_funcs.q.out index c9dd434..9dd16d7 100644 --- ql/src/test/results/clientpositive/llap/vectorized_timestamp_funcs.q.out +++ ql/src/test/results/clientpositive/llap/vectorized_timestamp_funcs.q.out @@ -806,6 +806,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3] mode: hash @@ -933,6 +935,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] mode: hash @@ -1078,6 +1082,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3] mode: hash diff --git ql/src/test/results/clientpositive/parquet_vectorization_0.q.out ql/src/test/results/clientpositive/parquet_vectorization_0.q.out index fbb78b1..8bfe2e5 100644 --- ql/src/test/results/clientpositive/parquet_vectorization_0.q.out +++ 
ql/src/test/results/clientpositive/parquet_vectorization_0.q.out @@ -48,6 +48,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3] mode: hash @@ -214,6 +216,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] mode: hash @@ -536,6 +540,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3] mode: hash @@ -702,6 +708,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] mode: hash @@ -1024,6 +1032,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3] mode: hash @@ -1190,6 +1200,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] mode: hash @@ -1558,6 +1570,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6] mode: hash diff --git ql/src/test/results/clientpositive/parquet_vectorization_1.q.out ql/src/test/results/clientpositive/parquet_vectorization_1.q.out index afada38..b77bfd9 100644 --- 
ql/src/test/results/clientpositive/parquet_vectorization_1.q.out +++ ql/src/test/results/clientpositive/parquet_vectorization_1.q.out @@ -81,6 +81,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] mode: hash diff --git ql/src/test/results/clientpositive/parquet_vectorization_12.q.out ql/src/test/results/clientpositive/parquet_vectorization_12.q.out index c284977..8f20fee 100644 --- ql/src/test/results/clientpositive/parquet_vectorization_12.q.out +++ ql/src/test/results/clientpositive/parquet_vectorization_12.q.out @@ -105,6 +105,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 5:double, col 3:bigint, col 6:string, col 10:boolean native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6] keys: _col3 (type: double), _col0 (type: bigint), _col2 (type: string), _col1 (type: boolean) diff --git ql/src/test/results/clientpositive/parquet_vectorization_13.q.out ql/src/test/results/clientpositive/parquet_vectorization_13.q.out index 6dd6e3f..d15fc1f 100644 --- ql/src/test/results/clientpositive/parquet_vectorization_13.q.out +++ ql/src/test/results/clientpositive/parquet_vectorization_13.q.out @@ -107,6 +107,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 10:boolean, col 0:tinyint, col 8:timestamp, col 4:float, col 6:string native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] keys: _col0 (type: boolean), _col1 (type: tinyint), _col2 (type: timestamp), _col3 (type: float), _col4 (type: string) @@ -437,6 +439,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 10:boolean, col 0:tinyint, col 8:timestamp, col 4:float, col 6:string native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] keys: _col0 (type: boolean), _col1 (type: tinyint), _col2 (type: timestamp), _col3 (type: float), _col4 (type: string) diff --git ql/src/test/results/clientpositive/parquet_vectorization_14.q.out ql/src/test/results/clientpositive/parquet_vectorization_14.q.out index c501fab..b0ebe5b 100644 --- ql/src/test/results/clientpositive/parquet_vectorization_14.q.out +++ ql/src/test/results/clientpositive/parquet_vectorization_14.q.out @@ -107,6 +107,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 
6:string, col 4:float, col 5:double, col 8:timestamp, col 10:boolean native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6] keys: _col2 (type: string), _col1 (type: float), _col4 (type: double), _col0 (type: timestamp), _col3 (type: boolean) diff --git ql/src/test/results/clientpositive/parquet_vectorization_15.q.out ql/src/test/results/clientpositive/parquet_vectorization_15.q.out index 39057d6..cc331cb 100644 --- ql/src/test/results/clientpositive/parquet_vectorization_15.q.out +++ ql/src/test/results/clientpositive/parquet_vectorization_15.q.out @@ -103,6 +103,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 4:float, col 10:boolean, col 5:double, col 6:string, col 0:tinyint, col 2:int, col 8:timestamp native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] keys: _col0 (type: float), _col1 (type: boolean), _col2 (type: double), _col3 (type: string), _col4 (type: tinyint), _col5 (type: int), _col6 (type: timestamp) diff --git ql/src/test/results/clientpositive/parquet_vectorization_16.q.out ql/src/test/results/clientpositive/parquet_vectorization_16.q.out index cf06c91..e18ed22 100644 --- ql/src/test/results/clientpositive/parquet_vectorization_16.q.out +++ ql/src/test/results/clientpositive/parquet_vectorization_16.q.out @@ -80,6 +80,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 6:string, col 5:double, col 8:timestamp native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3] keys: _col0 (type: string), _col1 (type: double), _col2 (type: timestamp) diff --git ql/src/test/results/clientpositive/parquet_vectorization_2.q.out ql/src/test/results/clientpositive/parquet_vectorization_2.q.out index 131797d..7384c01 100644 --- ql/src/test/results/clientpositive/parquet_vectorization_2.q.out +++ ql/src/test/results/clientpositive/parquet_vectorization_2.q.out @@ -85,6 +85,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] mode: hash diff --git ql/src/test/results/clientpositive/parquet_vectorization_3.q.out ql/src/test/results/clientpositive/parquet_vectorization_3.q.out index f98dea6..f77d1bc 100644 --- ql/src/test/results/clientpositive/parquet_vectorization_3.q.out +++ 
ql/src/test/results/clientpositive/parquet_vectorization_3.q.out @@ -90,6 +90,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13] mode: hash diff --git ql/src/test/results/clientpositive/parquet_vectorization_4.q.out ql/src/test/results/clientpositive/parquet_vectorization_4.q.out index 973e2bd..8826480 100644 --- ql/src/test/results/clientpositive/parquet_vectorization_4.q.out +++ ql/src/test/results/clientpositive/parquet_vectorization_4.q.out @@ -85,6 +85,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3, 4] mode: hash diff --git ql/src/test/results/clientpositive/parquet_vectorization_5.q.out ql/src/test/results/clientpositive/parquet_vectorization_5.q.out index e20dcbf..02054ff 100644 --- ql/src/test/results/clientpositive/parquet_vectorization_5.q.out +++ ql/src/test/results/clientpositive/parquet_vectorization_5.q.out @@ -78,6 +78,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3, 4] mode: hash diff --git ql/src/test/results/clientpositive/parquet_vectorization_9.q.out ql/src/test/results/clientpositive/parquet_vectorization_9.q.out index cf06c91..e18ed22 100644 --- ql/src/test/results/clientpositive/parquet_vectorization_9.q.out +++ ql/src/test/results/clientpositive/parquet_vectorization_9.q.out @@ -80,6 +80,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 6:string, col 5:double, col 8:timestamp native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3] keys: _col0 (type: string), _col1 (type: double), _col2 (type: timestamp) diff --git ql/src/test/results/clientpositive/parquet_vectorization_limit.q.out ql/src/test/results/clientpositive/parquet_vectorization_limit.q.out index 8a81b34..376ef9d 100644 --- ql/src/test/results/clientpositive/parquet_vectorization_limit.q.out +++ ql/src/test/results/clientpositive/parquet_vectorization_limit.q.out @@ -229,6 +229,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 0:tinyint native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Single Key Column IS true, Group By Mode HASH IS 
true, No Grouping Sets IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1] keys: _col0 (type: tinyint) @@ -361,6 +363,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 0:tinyint native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: ctinyint (type: tinyint) @@ -616,6 +620,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 5:double native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Single Key Column IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] keys: cdouble (type: double) diff --git ql/src/test/results/clientpositive/spark/vector_between_in.q.out ql/src/test/results/clientpositive/spark/vector_between_in.q.out index 9f5fa2a..7c2441a 100644 --- ql/src/test/results/clientpositive/spark/vector_between_in.q.out +++ ql/src/test/results/clientpositive/spark/vector_between_in.q.out @@ -163,6 +163,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] mode: hash @@ -363,6 +365,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] mode: hash @@ -749,6 +753,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] mode: hash @@ -1094,11 +1100,11 @@ STAGE PLANS: Group By Operator aggregations: count() Group By Vectorization: - aggregators: VectorUDAFCountStar(*) -> bigint - className: VectorGroupByOperator + className: VectorGroupByHashLongKeySingleCountStarOperator groupByMode: HASH keyExpressions: col 5:boolean - native: false + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true vectorProcessingMode: HASH 
projectedOutputColumnNums: [0] keys: _col0 (type: boolean) @@ -1122,7 +1128,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true Reducer 2 @@ -1230,11 +1236,11 @@ STAGE PLANS: Group By Operator aggregations: count() Group By Vectorization: - aggregators: VectorUDAFCountStar(*) -> bigint - className: VectorGroupByOperator + className: VectorGroupByHashLongKeySingleCountStarOperator groupByMode: HASH keyExpressions: col 5:boolean - native: false + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true vectorProcessingMode: HASH projectedOutputColumnNums: [0] keys: _col0 (type: boolean) @@ -1258,7 +1264,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true Reducer 2 @@ -1366,11 +1372,11 @@ STAGE PLANS: Group By Operator aggregations: count() Group By Vectorization: - aggregators: VectorUDAFCountStar(*) -> bigint - className: VectorGroupByOperator + className: VectorGroupByHashLongKeySingleCountStarOperator groupByMode: HASH keyExpressions: col 5:boolean - native: false + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true vectorProcessingMode: HASH projectedOutputColumnNums: [0] keys: _col0 (type: boolean) @@ -1394,7 +1400,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: true vectorized: true Reducer 2 @@ -1502,11 +1508,11 @@ STAGE PLANS: Group By Operator aggregations: count() Group By Vectorization: - aggregators: VectorUDAFCountStar(*) -> bigint - className: VectorGroupByOperator + className: VectorGroupByHashLongKeySingleCountStarOperator groupByMode: HASH keyExpressions: col 5:boolean - native: false + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true vectorProcessingMode: HASH projectedOutputColumnNums: [0] keys: _col0 (type: boolean) @@ -1530,7 +1536,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: true vectorized: true Reducer 2 diff --git ql/src/test/results/clientpositive/spark/vector_cast_constant.q.out ql/src/test/results/clientpositive/spark/vector_cast_constant.q.out index 83d5a62..eff91d1 100644 --- ql/src/test/results/clientpositive/spark/vector_cast_constant.q.out +++ ql/src/test/results/clientpositive/spark/vector_cast_constant.q.out @@ -148,6 +148,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 2:int native: false + nativeConditionsMet: 
hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, Single Key Column IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3, 4, 5] keys: _col0 (type: int) diff --git ql/src/test/results/clientpositive/spark/vector_count_distinct.q.out ql/src/test/results/clientpositive/spark/vector_count_distinct.q.out index 1444cd8..a2934ef 100644 --- ql/src/test/results/clientpositive/spark/vector_count_distinct.q.out +++ ql/src/test/results/clientpositive/spark/vector_count_distinct.q.out @@ -1264,10 +1264,11 @@ STAGE PLANS: Statistics: Num rows: 2000 Data size: 3504000 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: - className: VectorGroupByOperator + className: VectorGroupByHashLongKeyDuplicateReductionOperator groupByMode: HASH keyExpressions: col 16:int - native: false + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: ws_order_number (type: int) @@ -1290,7 +1291,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true Reducer 2 @@ -1321,6 +1322,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] mode: hash diff --git ql/src/test/results/clientpositive/spark/vector_data_types.q.out ql/src/test/results/clientpositive/spark/vector_data_types.q.out index 310a23a..a3cd0c2 100644 --- ql/src/test/results/clientpositive/spark/vector_data_types.q.out +++ ql/src/test/results/clientpositive/spark/vector_data_types.q.out @@ -374,6 +374,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] mode: hash diff --git ql/src/test/results/clientpositive/spark/vector_decimal_aggregate.q.out ql/src/test/results/clientpositive/spark/vector_decimal_aggregate.q.out index d37a27e..daf08be 100644 --- ql/src/test/results/clientpositive/spark/vector_decimal_aggregate.q.out +++ ql/src/test/results/clientpositive/spark/vector_decimal_aggregate.q.out @@ -87,6 +87,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 3:int native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, Single Key Column IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: 
Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8] keys: cint (type: int) @@ -265,6 +267,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 3:int native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, Single Key Column IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12] keys: _col0 (type: int) @@ -477,6 +481,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 3:int native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, Single Key Column IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8] keys: cint (type: int) @@ -674,6 +680,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 3:int native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, Single Key Column IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12] keys: _col0 (type: int) diff --git ql/src/test/results/clientpositive/spark/vector_distinct_2.q.out ql/src/test/results/clientpositive/spark/vector_distinct_2.q.out index 0236980..4464d81 100644 --- ql/src/test/results/clientpositive/spark/vector_distinct_2.q.out +++ ql/src/test/results/clientpositive/spark/vector_distinct_2.q.out @@ -143,6 +143,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 0:tinyint, col 8:string native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: t (type: tinyint), s (type: string) diff --git ql/src/test/results/clientpositive/spark/vector_groupby_3.q.out ql/src/test/results/clientpositive/spark/vector_groupby_3.q.out index a68002e..76c4f42 100644 --- ql/src/test/results/clientpositive/spark/vector_groupby_3.q.out +++ ql/src/test/results/clientpositive/spark/vector_groupby_3.q.out @@ -145,6 +145,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 0:tinyint, col 8:string native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] keys: t (type: tinyint), s (type: string) diff --git ql/src/test/results/clientpositive/spark/vector_inner_join.q.out ql/src/test/results/clientpositive/spark/vector_inner_join.q.out index 168aa77..a801c5a 100644 --- ql/src/test/results/clientpositive/spark/vector_inner_join.q.out +++ 
ql/src/test/results/clientpositive/spark/vector_inner_join.q.out @@ -193,12 +193,12 @@ PREHOOK: query: select t1.a from orc_table_2a t2 join orc_table_1a t1 on t1.a = PREHOOK: type: QUERY PREHOOK: Input: default@orc_table_1a PREHOOK: Input: default@orc_table_2a -PREHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### POSTHOOK: query: select t1.a from orc_table_2a t2 join orc_table_1a t1 on t1.a = t2.c where t1.a > 2 POSTHOOK: type: QUERY POSTHOOK: Input: default@orc_table_1a POSTHOOK: Input: default@orc_table_2a -POSTHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### 3 PREHOOK: query: explain vectorization detail select t2.c from orc_table_2a t2 left semi join orc_table_1a t1 on t1.a = t2.c where t2.c > 2 @@ -245,10 +245,11 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: - className: VectorGroupByOperator + className: VectorGroupByHashLongKeyDuplicateReductionOperator groupByMode: HASH keyExpressions: col 0:int - native: false + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: _col0 (type: int) @@ -269,7 +270,7 @@ STAGE PLANS: inputFormatFeatureSupport: [] featureSupportInUse: [] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true rowBatchContext: @@ -365,12 +366,12 @@ PREHOOK: query: select t2.c from orc_table_2a t2 left semi join orc_table_1a t1 PREHOOK: type: QUERY PREHOOK: Input: default@orc_table_1a PREHOOK: Input: default@orc_table_2a -PREHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### POSTHOOK: query: select t2.c from orc_table_2a t2 left semi join orc_table_1a t1 on t1.a = t2.c where t2.c > 2 POSTHOOK: type: QUERY POSTHOOK: Input: default@orc_table_1a POSTHOOK: Input: default@orc_table_2a -POSTHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### 3 PREHOOK: query: CREATE TABLE orc_table_1b(v1 STRING, a INT) STORED AS ORC PREHOOK: type: CREATETABLE @@ -569,12 +570,12 @@ PREHOOK: query: select t1.v1, t1.a from orc_table_2b t2 join orc_table_1b t1 on PREHOOK: type: QUERY PREHOOK: Input: default@orc_table_1b PREHOOK: Input: default@orc_table_2b -PREHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### POSTHOOK: query: select t1.v1, t1.a from orc_table_2b t2 join orc_table_1b t1 on t1.a = t2.c where t1.a > 2 POSTHOOK: type: QUERY POSTHOOK: Input: default@orc_table_1b POSTHOOK: Input: default@orc_table_2b -POSTHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### three 3 PREHOOK: query: explain vectorization detail select t1.v1, t1.a, t2.c, t2.v2 from orc_table_2b t2 join orc_table_1b t1 on t1.a = t2.c where t1.a > 2 @@ -730,12 +731,12 @@ PREHOOK: query: select t1.v1, t1.a, t2.c, t2.v2 from orc_table_2b t2 join orc_ta PREHOOK: type: QUERY PREHOOK: Input: default@orc_table_1b PREHOOK: Input: default@orc_table_2b -PREHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### POSTHOOK: query: select t1.v1, t1.a, t2.c, t2.v2 from orc_table_2b t2 join orc_table_1b t1 on t1.a = t2.c where t1.a > 2 POSTHOOK: type: QUERY POSTHOOK: Input: default@orc_table_1b 
POSTHOOK: Input: default@orc_table_2b -POSTHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### three 3 3 THREE PREHOOK: query: explain vectorization detail select t1.v1, t1.a*2, t2.c*5, t2.v2 from orc_table_2b t2 join orc_table_1b t1 on t1.a = t2.c where t1.a > 2 @@ -900,12 +901,12 @@ PREHOOK: query: select t1.v1, t1.a*2, t2.c*5, t2.v2 from orc_table_2b t2 join or PREHOOK: type: QUERY PREHOOK: Input: default@orc_table_1b PREHOOK: Input: default@orc_table_2b -PREHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### POSTHOOK: query: select t1.v1, t1.a*2, t2.c*5, t2.v2 from orc_table_2b t2 join orc_table_1b t1 on t1.a = t2.c where t1.a > 2 POSTHOOK: type: QUERY POSTHOOK: Input: default@orc_table_1b POSTHOOK: Input: default@orc_table_2b -POSTHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### three 6 15 THREE PREHOOK: query: explain vectorization detail select t1.v1, t2.v2, t2.c from orc_table_2b t2 join orc_table_1b t1 on t1.a = t2.c where t1.a > 2 @@ -1069,12 +1070,12 @@ PREHOOK: query: select t1.v1, t2.v2, t2.c from orc_table_2b t2 join orc_table_1b PREHOOK: type: QUERY PREHOOK: Input: default@orc_table_1b PREHOOK: Input: default@orc_table_2b -PREHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### POSTHOOK: query: select t1.v1, t2.v2, t2.c from orc_table_2b t2 join orc_table_1b t1 on t1.a = t2.c where t1.a > 2 POSTHOOK: type: QUERY POSTHOOK: Input: default@orc_table_1b POSTHOOK: Input: default@orc_table_2b -POSTHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### three THREE 3 PREHOOK: query: explain vectorization detail select t1.a, t1.v1, t2.v2 from orc_table_2b t2 join orc_table_1b t1 on t1.a = t2.c where t1.a > 2 @@ -1238,12 +1239,12 @@ PREHOOK: query: select t1.a, t1.v1, t2.v2 from orc_table_2b t2 join orc_table_1b PREHOOK: type: QUERY PREHOOK: Input: default@orc_table_1b PREHOOK: Input: default@orc_table_2b -PREHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### POSTHOOK: query: select t1.a, t1.v1, t2.v2 from orc_table_2b t2 join orc_table_1b t1 on t1.a = t2.c where t1.a > 2 POSTHOOK: type: QUERY POSTHOOK: Input: default@orc_table_1b POSTHOOK: Input: default@orc_table_2b -POSTHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### 3 three THREE PREHOOK: query: explain vectorization detail select t1.v1, t2.v2, t2.c from orc_table_1b t1 join orc_table_2b t2 on t1.a = t2.c where t1.a > 2 @@ -1407,12 +1408,12 @@ PREHOOK: query: select t1.v1, t2.v2, t2.c from orc_table_1b t1 join orc_table_2b PREHOOK: type: QUERY PREHOOK: Input: default@orc_table_1b PREHOOK: Input: default@orc_table_2b -PREHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### POSTHOOK: query: select t1.v1, t2.v2, t2.c from orc_table_1b t1 join orc_table_2b t2 on t1.a = t2.c where t1.a > 2 POSTHOOK: type: QUERY POSTHOOK: Input: default@orc_table_1b POSTHOOK: Input: default@orc_table_2b -POSTHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### three THREE 3 PREHOOK: query: explain vectorization detail select t1.a, t1.v1, t2.v2 from orc_table_1b t1 join orc_table_2b t2 on t1.a = t2.c where t1.a > 2 @@ -1576,10 +1577,10 @@ PREHOOK: query: select t1.a, t1.v1, t2.v2 from orc_table_1b t1 join orc_table_2b PREHOOK: type: QUERY PREHOOK: Input: default@orc_table_1b PREHOOK: Input: default@orc_table_2b -PREHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### POSTHOOK: query: select t1.a, t1.v1, t2.v2 from 
orc_table_1b t1 join orc_table_2b t2 on t1.a = t2.c where t1.a > 2 POSTHOOK: type: QUERY POSTHOOK: Input: default@orc_table_1b POSTHOOK: Input: default@orc_table_2b -POSTHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### 3 three THREE diff --git ql/src/test/results/clientpositive/spark/vector_mapjoin_reduce.q.out ql/src/test/results/clientpositive/spark/vector_mapjoin_reduce.q.out index ff1af2c..5fe324e 100644 --- ql/src/test/results/clientpositive/spark/vector_mapjoin_reduce.q.out +++ ql/src/test/results/clientpositive/spark/vector_mapjoin_reduce.q.out @@ -89,10 +89,11 @@ STAGE PLANS: Statistics: Num rows: 50 Data size: 5999 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: - className: VectorGroupByOperator + className: VectorGroupByHashLongKeyDuplicateReductionOperator groupByMode: HASH keyExpressions: col 0:int - native: false + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: _col0 (type: int) @@ -113,7 +114,7 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true Local Work: @@ -141,10 +142,11 @@ STAGE PLANS: Statistics: Num rows: 100 Data size: 11999 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: - className: VectorGroupByOperator + className: VectorGroupByHashLongKeyDuplicateReductionOperator groupByMode: HASH keyExpressions: col 1:int - native: false + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: l_partkey (type: int) @@ -167,7 +169,7 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true Reducer 2 @@ -367,6 +369,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 0:int, col 17:int native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: _col0 (type: int), _col1 (type: int) @@ -415,10 +419,11 @@ STAGE PLANS: Statistics: Num rows: 100 Data size: 11999 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: - className: VectorGroupByOperator + className: VectorGroupByHashLongKeyDuplicateReductionOperator groupByMode: HASH keyExpressions: col 1:int - native: false + native: true + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS 
true, No Grouping Sets IS true vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: l_partkey (type: int) @@ -441,7 +446,7 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.mapred.TextInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true Reducer 2 diff --git ql/src/test/results/clientpositive/spark/vector_orderby_5.q.out ql/src/test/results/clientpositive/spark/vector_orderby_5.q.out index e6546c5..250560c 100644 --- ql/src/test/results/clientpositive/spark/vector_orderby_5.q.out +++ ql/src/test/results/clientpositive/spark/vector_orderby_5.q.out @@ -146,6 +146,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 7:boolean native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, Single Key Column IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] keys: bo (type: boolean) diff --git ql/src/test/results/clientpositive/spark/vector_outer_join0.q.out ql/src/test/results/clientpositive/spark/vector_outer_join0.q.out index bc9d102..df5339a 100644 --- ql/src/test/results/clientpositive/spark/vector_outer_join0.q.out +++ ql/src/test/results/clientpositive/spark/vector_outer_join0.q.out @@ -37,11 +37,11 @@ POSTHOOK: Lineage: orc_table_2.v2 SCRIPT [] PREHOOK: query: select * from orc_table_1 PREHOOK: type: QUERY PREHOOK: Input: default@orc_table_1 -PREHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### POSTHOOK: query: select * from orc_table_1 POSTHOOK: type: QUERY POSTHOOK: Input: default@orc_table_1 -POSTHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### NULL NULL one 1 @@ -51,11 +51,11 @@ two 2 PREHOOK: query: select * from orc_table_2 PREHOOK: type: QUERY PREHOOK: Input: default@orc_table_2 -PREHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### POSTHOOK: query: select * from orc_table_2 POSTHOOK: type: QUERY POSTHOOK: Input: default@orc_table_2 -POSTHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### 0 ZERO 2 TWO 3 THREE @@ -203,12 +203,12 @@ PREHOOK: query: select t1.v1, t1.a, t2.c, t2.v2 from orc_table_1 t1 left outer j PREHOOK: type: QUERY PREHOOK: Input: default@orc_table_1 PREHOOK: Input: default@orc_table_2 -PREHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### POSTHOOK: query: select t1.v1, t1.a, t2.c, t2.v2 from orc_table_1 t1 left outer join orc_table_2 t2 on t1.a = t2.c POSTHOOK: type: QUERY POSTHOOK: Input: default@orc_table_1 POSTHOOK: Input: default@orc_table_2 -POSTHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### NULL NULL NULL NULL NULL NULL one 1 NULL NULL @@ -356,12 +356,12 @@ PREHOOK: query: select t1.v1, t1.a, t2.c, t2.v2 from orc_table_1 t1 right outer PREHOOK: type: QUERY PREHOOK: Input: default@orc_table_1 PREHOOK: Input: default@orc_table_2 -PREHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### POSTHOOK: query: select t1.v1, t1.a, t2.c, t2.v2 from orc_table_1 t1 right outer join orc_table_2 t2 on t1.a = t2.c POSTHOOK: type: QUERY POSTHOOK: Input: default@orc_table_1 POSTHOOK: Input: default@orc_table_2 -POSTHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### NULL NULL 0 ZERO NULL NULL 4 FOUR NULL NULL NULL diff 
--git ql/src/test/results/clientpositive/spark/vector_outer_join1.q.out ql/src/test/results/clientpositive/spark/vector_outer_join1.q.out index 9a1fa53..b3046f3 100644 --- ql/src/test/results/clientpositive/spark/vector_outer_join1.q.out +++ ql/src/test/results/clientpositive/spark/vector_outer_join1.q.out @@ -89,11 +89,11 @@ POSTHOOK: Lineage: small_alltypesorc4a.ctinyint SIMPLE [] PREHOOK: query: select * from small_alltypesorc1a PREHOOK: type: QUERY PREHOOK: Input: default@small_alltypesorc1a -PREHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### POSTHOOK: query: select * from small_alltypesorc1a POSTHOOK: type: QUERY POSTHOOK: Input: default@small_alltypesorc1a -POSTHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### -64 -10462 626923679 NULL -64.0 -10462.0 821UdmGbkEf4j NULL 1969-12-31 16:00:02.496 1969-12-31 16:00:00.164 true NULL -64 -15920 528534767 NULL -64.0 -15920.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:51.859 1969-12-31 16:00:14.468 true NULL -64 -6907 253665376 NULL -64.0 -6907.0 1cGVWH7n1QU NULL NULL 1969-12-31 15:59:53.66 true NULL @@ -102,11 +102,11 @@ POSTHOOK: Output: hdfs://### HDFS PATH ### PREHOOK: query: select * from small_alltypesorc2a PREHOOK: type: QUERY PREHOOK: Input: default@small_alltypesorc2a -PREHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### POSTHOOK: query: select * from small_alltypesorc2a POSTHOOK: type: QUERY POSTHOOK: Input: default@small_alltypesorc2a -POSTHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### -64 -7196 NULL -1615920595 -64.0 -7196.0 NULL X5rDjl 1969-12-31 16:00:11.912 1969-12-31 15:59:58.174 NULL false -64 -7196 NULL -1639157869 -64.0 -7196.0 NULL IJ0Oj7qAiqNGsN7gn 1969-12-31 16:00:01.785 1969-12-31 15:59:58.174 NULL false -64 -7196 NULL -527203677 -64.0 -7196.0 NULL JBE4H5RoK412Cs260I72 1969-12-31 15:59:50.184 1969-12-31 15:59:58.174 NULL true @@ -115,11 +115,11 @@ POSTHOOK: Output: hdfs://### HDFS PATH ### PREHOOK: query: select * from small_alltypesorc3a PREHOOK: type: QUERY PREHOOK: Input: default@small_alltypesorc3a -PREHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### POSTHOOK: query: select * from small_alltypesorc3a POSTHOOK: type: QUERY POSTHOOK: Input: default@small_alltypesorc3a -POSTHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### NULL NULL -1015272448 -1887561756 NULL NULL jTQ68531mP 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:45.854 false false NULL NULL -850295959 -1887561756 NULL NULL WMIgGA73 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:00.348 false false NULL NULL -886426182 -1887561756 NULL NULL 0i88xYq3gx1nW4vKjp7vBp3 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:04.472 true false @@ -128,11 +128,11 @@ NULL NULL -971543377 -1645852809 NULL NULL uN803aW xH7445Rals48VOulSyR5F NULL 19 PREHOOK: query: select * from small_alltypesorc4a PREHOOK: type: QUERY PREHOOK: Input: default@small_alltypesorc4a -PREHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### POSTHOOK: query: select * from small_alltypesorc4a POSTHOOK: type: QUERY POSTHOOK: Input: default@small_alltypesorc4a -POSTHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### PREHOOK: query: create table small_alltypesorc_a stored as orc as select * from (select * from (select * from small_alltypesorc1a) sq1 union all @@ -187,20 +187,20 @@ PREHOOK: query: ANALYZE TABLE small_alltypesorc_a COMPUTE STATISTICS FOR COLUMNS PREHOOK: type: QUERY PREHOOK: Input: 
default@small_alltypesorc_a PREHOOK: Output: default@small_alltypesorc_a -PREHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### POSTHOOK: query: ANALYZE TABLE small_alltypesorc_a COMPUTE STATISTICS FOR COLUMNS POSTHOOK: type: QUERY POSTHOOK: Input: default@small_alltypesorc_a POSTHOOK: Output: default@small_alltypesorc_a -POSTHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### PREHOOK: query: select * from small_alltypesorc_a PREHOOK: type: QUERY PREHOOK: Input: default@small_alltypesorc_a -PREHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### POSTHOOK: query: select * from small_alltypesorc_a POSTHOOK: type: QUERY POSTHOOK: Input: default@small_alltypesorc_a -POSTHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### -64 -10462 626923679 NULL -64.0 -10462.0 821UdmGbkEf4j NULL 1969-12-31 16:00:02.496 1969-12-31 16:00:00.164 true NULL -64 -15920 528534767 NULL -64.0 -15920.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:51.859 1969-12-31 16:00:14.468 true NULL -64 -6907 253665376 NULL -64.0 -6907.0 1cGVWH7n1QU NULL NULL 1969-12-31 15:59:53.66 true NULL @@ -365,14 +365,14 @@ left outer join small_alltypesorc_a cd on cd.cint = c.cint PREHOOK: type: QUERY PREHOOK: Input: default@small_alltypesorc_a -PREHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### POSTHOOK: query: select * from small_alltypesorc_a c left outer join small_alltypesorc_a cd on cd.cint = c.cint POSTHOOK: type: QUERY POSTHOOK: Input: default@small_alltypesorc_a -POSTHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### -64 -10462 626923679 NULL -64.0 -10462.0 821UdmGbkEf4j NULL 1969-12-31 16:00:02.496 1969-12-31 16:00:00.164 true NULL -64 -10462 626923679 NULL -64.0 -10462.0 821UdmGbkEf4j NULL 1969-12-31 16:00:02.496 1969-12-31 16:00:00.164 true NULL -64 -15920 528534767 NULL -64.0 -15920.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:51.859 1969-12-31 16:00:14.468 true NULL -64 -15920 528534767 NULL -64.0 -15920.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:51.859 1969-12-31 16:00:14.468 true NULL -64 -15920 528534767 NULL -64.0 -15920.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:51.859 1969-12-31 16:00:14.468 true NULL -64 -8080 528534767 NULL -64.0 -8080.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:58.044 1969-12-31 15:59:48.655 true NULL @@ -539,14 +539,14 @@ left outer join small_alltypesorc_a hd on hd.ctinyint = c.ctinyint PREHOOK: type: QUERY PREHOOK: Input: default@small_alltypesorc_a -PREHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### POSTHOOK: query: select c.ctinyint from small_alltypesorc_a c left outer join small_alltypesorc_a hd on hd.ctinyint = c.ctinyint POSTHOOK: type: QUERY POSTHOOK: Input: default@small_alltypesorc_a -POSTHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### -64 -64 -64 @@ -832,6 +832,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1] mode: hash @@ -919,7 +921,7 @@ left outer join small_alltypesorc_a hd ) t1 PREHOOK: type: QUERY PREHOOK: Input: default@small_alltypesorc_a -PREHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here 
####
POSTHOOK: query: select count(*), sum(t1.c_ctinyint) from (select c.ctinyint as c_ctinyint
from small_alltypesorc_a c
left outer join small_alltypesorc_a cd
@@ -929,5 +931,5 @@ left outer join small_alltypesorc_a hd
) t1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@small_alltypesorc_a
-POSTHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
145 -8960
diff --git ql/src/test/results/clientpositive/spark/vector_outer_join2.q.out ql/src/test/results/clientpositive/spark/vector_outer_join2.q.out
index 32bcc9b..9654e20 100644
--- ql/src/test/results/clientpositive/spark/vector_outer_join2.q.out
+++ ql/src/test/results/clientpositive/spark/vector_outer_join2.q.out
@@ -89,11 +89,11 @@ POSTHOOK: Lineage: small_alltypesorc4a.ctinyint SIMPLE [(alltypesorc)alltypesorc
PREHOOK: query: select * from small_alltypesorc1a
PREHOOK: type: QUERY
PREHOOK: Input: default@small_alltypesorc1a
-PREHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
POSTHOOK: query: select * from small_alltypesorc1a
POSTHOOK: type: QUERY
POSTHOOK: Input: default@small_alltypesorc1a
-POSTHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
NULL NULL -1015272448 -1887561756 NULL NULL jTQ68531mP 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:45.854 false false
NULL NULL -850295959 -1887561756 NULL NULL WMIgGA73 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:00.348 false false
NULL NULL -886426182 -1887561756 NULL NULL 0i88xYq3gx1nW4vKjp7vBp3 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:04.472 true false
@@ -102,11 +102,11 @@ NULL NULL -971543377 -1645852809 NULL NULL uN803aW xH7445Rals48VOulSyR5F NULL 19
PREHOOK: query: select * from small_alltypesorc2a
PREHOOK: type: QUERY
PREHOOK: Input: default@small_alltypesorc2a
-PREHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
POSTHOOK: query: select * from small_alltypesorc2a
POSTHOOK: type: QUERY
POSTHOOK: Input: default@small_alltypesorc2a
-POSTHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
-64 -7196 NULL -1615920595 -64.0 -7196.0 NULL X5rDjl 1969-12-31 16:00:11.912 1969-12-31 15:59:58.174 NULL false
-64 -7196 NULL -1639157869 -64.0 -7196.0 NULL IJ0Oj7qAiqNGsN7gn 1969-12-31 16:00:01.785 1969-12-31 15:59:58.174 NULL false
-64 -7196 NULL -527203677 -64.0 -7196.0 NULL JBE4H5RoK412Cs260I72 1969-12-31 15:59:50.184 1969-12-31 15:59:58.174 NULL true
@@ -115,11 +115,11 @@ POSTHOOK: Output: hdfs://### HDFS PATH ###
PREHOOK: query: select * from small_alltypesorc3a
PREHOOK: type: QUERY
PREHOOK: Input: default@small_alltypesorc3a
-PREHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
POSTHOOK: query: select * from small_alltypesorc3a
POSTHOOK: type: QUERY
POSTHOOK: Input: default@small_alltypesorc3a
-POSTHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
NULL -13166 626923679 NULL NULL -13166.0 821UdmGbkEf4j NULL 1969-12-31 15:59:55.089 1969-12-31 16:00:15.69 true NULL
NULL -14426 626923679 NULL NULL -14426.0 821UdmGbkEf4j NULL 1969-12-31 16:00:11.505 1969-12-31 16:00:13.309 true NULL
NULL -14847 626923679 NULL NULL -14847.0 821UdmGbkEf4j NULL 1969-12-31 16:00:00.612 1969-12-31 15:59:43.704 true NULL
@@ -128,11 +128,11 @@ NULL -15830 253665376 NULL NULL -15830.0 1cGVWH7n1QU NULL 1969-12-31 16:00:02.58
PREHOOK: query: select * from small_alltypesorc4a
PREHOOK: type: QUERY
PREHOOK: Input: default@small_alltypesorc4a
-PREHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
POSTHOOK: query: select * from small_alltypesorc4a
POSTHOOK: type: QUERY
POSTHOOK: Input: default@small_alltypesorc4a
-POSTHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
-60 -200 NULL NULL -60.0 -200.0 NULL NULL 1969-12-31 16:00:11.996 1969-12-31 15:59:55.451 NULL NULL
-61 -7196 NULL NULL -61.0 -7196.0 NULL 8Mlns2Tl6E0g 1969-12-31 15:59:44.823 1969-12-31 15:59:58.174 NULL false
-61 -7196 NULL NULL -61.0 -7196.0 NULL fUJIN 1969-12-31 16:00:11.842 1969-12-31 15:59:58.174 NULL false
@@ -192,20 +192,20 @@ PREHOOK: query: ANALYZE TABLE small_alltypesorc_a COMPUTE STATISTICS FOR COLUMNS
PREHOOK: type: QUERY
PREHOOK: Input: default@small_alltypesorc_a
PREHOOK: Output: default@small_alltypesorc_a
-PREHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
POSTHOOK: query: ANALYZE TABLE small_alltypesorc_a COMPUTE STATISTICS FOR COLUMNS
POSTHOOK: type: QUERY
POSTHOOK: Input: default@small_alltypesorc_a
POSTHOOK: Output: default@small_alltypesorc_a
-POSTHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
PREHOOK: query: select * from small_alltypesorc_a
PREHOOK: type: QUERY
PREHOOK: Input: default@small_alltypesorc_a
-PREHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
POSTHOOK: query: select * from small_alltypesorc_a
POSTHOOK: type: QUERY
POSTHOOK: Input: default@small_alltypesorc_a
-POSTHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
-60 -200 NULL NULL -60.0 -200.0 NULL NULL 1969-12-31 16:00:11.996 1969-12-31 15:59:55.451 NULL NULL
-61 -7196 NULL NULL -61.0 -7196.0 NULL 8Mlns2Tl6E0g 1969-12-31 15:59:44.823 1969-12-31 15:59:58.174 NULL false
-61 -7196 NULL NULL -61.0 -7196.0 NULL fUJIN 1969-12-31 16:00:11.842 1969-12-31 15:59:58.174 NULL false
@@ -406,6 +406,8 @@ STAGE PLANS:
className: VectorGroupByOperator
groupByMode: HASH
native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false
vectorProcessingMode: HASH
projectedOutputColumnNums: [0, 1]
mode: hash
@@ -493,7 +495,7 @@ left outer join small_alltypesorc_a hd
) t1
PREHOOK: type: QUERY
PREHOOK: Input: default@small_alltypesorc_a
-PREHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
POSTHOOK: query: select count(*), sum(t1.c_cbigint) from (select c.cbigint as c_cbigint
from small_alltypesorc_a c
left outer join small_alltypesorc_a cd
@@ -503,5 +505,5 @@ left outer join small_alltypesorc_a hd
) t1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@small_alltypesorc_a
-POSTHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
34 -26289186744
diff --git ql/src/test/results/clientpositive/spark/vector_outer_join3.q.out ql/src/test/results/clientpositive/spark/vector_outer_join3.q.out
index c5568b6..f8d1ec2 100644
--- ql/src/test/results/clientpositive/spark/vector_outer_join3.q.out
+++ ql/src/test/results/clientpositive/spark/vector_outer_join3.q.out
@@ -89,11 +89,11 @@ POSTHOOK: Lineage: small_alltypesorc4a.ctinyint SIMPLE [(alltypesorc)alltypesorc
PREHOOK: query: select * from small_alltypesorc1a
PREHOOK: type: QUERY
PREHOOK: Input: default@small_alltypesorc1a
-PREHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
POSTHOOK: query: select * from small_alltypesorc1a
POSTHOOK: type: QUERY
POSTHOOK: Input: default@small_alltypesorc1a
-POSTHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
NULL NULL -1015272448 -1887561756 NULL NULL jTQ68531mP 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:45.854 false false
NULL NULL -850295959 -1887561756 NULL NULL WMIgGA73 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:00.348 false false
NULL NULL -886426182 -1887561756 NULL NULL 0i88xYq3gx1nW4vKjp7vBp3 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:04.472 true false
@@ -102,11 +102,11 @@ NULL NULL -971543377 -1645852809 NULL NULL uN803aW xH7445Rals48VOulSyR5F NULL 19
PREHOOK: query: select * from small_alltypesorc2a
PREHOOK: type: QUERY
PREHOOK: Input: default@small_alltypesorc2a
-PREHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
POSTHOOK: query: select * from small_alltypesorc2a
POSTHOOK: type: QUERY
POSTHOOK: Input: default@small_alltypesorc2a
-POSTHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
-51 NULL NULL -1731061911 -51.0 NULL Pw53BBJ yL443x2437PO5Hv1U3lCjq2D 1969-12-31 16:00:08.451 NULL true false
-51 NULL NULL -1846191223 -51.0 NULL Ul085f84S33Xd32u x1JC58g0Ukp 1969-12-31 16:00:08.451 NULL true true
-51 NULL NULL -1874052220 -51.0 NULL c61B47I604gymFJ sjWQS78 1969-12-31 16:00:08.451 NULL false false
@@ -115,11 +115,11 @@ POSTHOOK: Output: hdfs://### HDFS PATH ###
PREHOOK: query: select * from small_alltypesorc3a
PREHOOK: type: QUERY
PREHOOK: Input: default@small_alltypesorc3a
-PREHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
POSTHOOK: query: select * from small_alltypesorc3a
POSTHOOK: type: QUERY
POSTHOOK: Input: default@small_alltypesorc3a
-POSTHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
-51 NULL -31312632 1086455747 -51.0 NULL NULL Bc7xt12568c451o64LF5 1969-12-31 16:00:08.451 NULL NULL true
-51 NULL -337975743 608681041 -51.0 NULL NULL Ih2r28o6 1969-12-31 16:00:08.451 NULL NULL true
-51 NULL -413196097 -306198070 -51.0 NULL NULL F53QcSDPpxYF1Ub 1969-12-31 16:00:08.451 NULL NULL false
@@ -128,11 +128,11 @@ POSTHOOK: Output: hdfs://### HDFS PATH ###
PREHOOK: query: select * from small_alltypesorc4a
PREHOOK: type: QUERY
PREHOOK: Input: default@small_alltypesorc4a
-PREHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
POSTHOOK: query: select * from small_alltypesorc4a
POSTHOOK: type: QUERY
POSTHOOK: Input: default@small_alltypesorc4a
-POSTHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
-64 -7196 NULL -1615920595 -64.0 -7196.0 NULL X5rDjl 1969-12-31 16:00:11.912 1969-12-31 15:59:58.174 NULL false
-64 -7196 NULL -1639157869 -64.0 -7196.0 NULL IJ0Oj7qAiqNGsN7gn 1969-12-31 16:00:01.785 1969-12-31 15:59:58.174 NULL false
-64 -7196 NULL -527203677 -64.0 -7196.0 NULL JBE4H5RoK412Cs260I72 1969-12-31 15:59:50.184 1969-12-31 15:59:58.174 NULL true
@@ -192,20 +192,20 @@ PREHOOK: query: ANALYZE TABLE small_alltypesorc_a COMPUTE STATISTICS FOR COLUMNS
PREHOOK: type: QUERY
PREHOOK: Input: default@small_alltypesorc_a
PREHOOK: Output: default@small_alltypesorc_a
-PREHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
POSTHOOK: query: ANALYZE TABLE small_alltypesorc_a COMPUTE STATISTICS FOR COLUMNS
POSTHOOK: type: QUERY
POSTHOOK: Input: default@small_alltypesorc_a
POSTHOOK: Output: default@small_alltypesorc_a
-POSTHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
PREHOOK: query: select * from small_alltypesorc_a
PREHOOK: type: QUERY
PREHOOK: Input: default@small_alltypesorc_a
-PREHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
POSTHOOK: query: select * from small_alltypesorc_a
POSTHOOK: type: QUERY
POSTHOOK: Input: default@small_alltypesorc_a
-POSTHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
-51 NULL -31312632 1086455747 -51.0 NULL NULL Bc7xt12568c451o64LF5 1969-12-31 16:00:08.451 NULL NULL true
-51 NULL -337975743 608681041 -51.0 NULL NULL Ih2r28o6 1969-12-31 16:00:08.451 NULL NULL true
-51 NULL -413196097 -306198070 -51.0 NULL NULL F53QcSDPpxYF1Ub 1969-12-31 16:00:08.451 NULL NULL false
@@ -254,7 +254,7 @@ left outer join small_alltypesorc_a hd
) t1
PREHOOK: type: QUERY
PREHOOK: Input: default@small_alltypesorc_a
-PREHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
POSTHOOK: query: select count(*) from (select c.cstring1
from small_alltypesorc_a c
left outer join small_alltypesorc_a cd
@@ -264,7 +264,7 @@ left outer join small_alltypesorc_a hd
) t1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@small_alltypesorc_a
-POSTHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
20
PREHOOK: query: explain vectorization detail formatted
select count(*) from (select c.cstring1
@@ -294,7 +294,7 @@ left outer join small_alltypesorc_a hd
) t1
PREHOOK: type: QUERY
PREHOOK: Input: default@small_alltypesorc_a
-PREHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
POSTHOOK: query: select count(*) from (select c.cstring1
from small_alltypesorc_a c
left outer join small_alltypesorc_a cd
@@ -304,7 +304,7 @@ left outer join small_alltypesorc_a hd
) t1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@small_alltypesorc_a
-POSTHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
28
PREHOOK: query: explain vectorization detail formatted
select count(*) from (select c.cstring1
@@ -334,7 +334,7 @@ left outer join small_alltypesorc_a hd
) t1
PREHOOK: type: QUERY
PREHOOK: Input: default@small_alltypesorc_a
-PREHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
POSTHOOK: query: select count(*) from (select c.cstring1
from small_alltypesorc_a c
left outer join small_alltypesorc_a cd
@@ -344,5 +344,5 @@ left outer join small_alltypesorc_a hd
) t1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@small_alltypesorc_a
-POSTHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
28
diff --git ql/src/test/results/clientpositive/spark/vector_outer_join4.q.out ql/src/test/results/clientpositive/spark/vector_outer_join4.q.out
index 9872ab1..a55250b 100644
--- ql/src/test/results/clientpositive/spark/vector_outer_join4.q.out
+++ ql/src/test/results/clientpositive/spark/vector_outer_join4.q.out
@@ -89,11 +89,11 @@ POSTHOOK: Lineage: small_alltypesorc4b.ctinyint SIMPLE []
PREHOOK: query: select * from small_alltypesorc1b
PREHOOK: type: QUERY
PREHOOK: Input: default@small_alltypesorc1b
-PREHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
POSTHOOK: query: select * from small_alltypesorc1b
POSTHOOK: type: QUERY
POSTHOOK: Input: default@small_alltypesorc1b
-POSTHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
-64 -10462 626923679 NULL -64.0 -10462.0 821UdmGbkEf4j NULL 1969-12-31 16:00:02.496 1969-12-31 16:00:00.164 true NULL
-64 -15920 528534767 NULL -64.0 -15920.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:51.859 1969-12-31 16:00:14.468 true NULL
-64 -3097 253665376 NULL -64.0 -3097.0 1cGVWH7n1QU NULL 1969-12-31 16:00:00.013 1969-12-31 16:00:06.097 true NULL
@@ -107,11 +107,11 @@ POSTHOOK: Output: hdfs://### HDFS PATH ###
PREHOOK: query: select * from small_alltypesorc2b
PREHOOK: type: QUERY
PREHOOK: Input: default@small_alltypesorc2b
-PREHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
POSTHOOK: query: select * from small_alltypesorc2b
POSTHOOK: type: QUERY
POSTHOOK: Input: default@small_alltypesorc2b
-POSTHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
-64 -200 NULL -1809444706 -64.0 -200.0 NULL B87YVb3UASqg 1969-12-31 16:00:10.858 1969-12-31 15:59:55.451 NULL true
-64 -200 NULL 2118653994 -64.0 -200.0 NULL ONHGSDy1U4Ft431DfQp15 1969-12-31 16:00:03.944 1969-12-31 15:59:55.451 NULL true
-64 -200 NULL 927647669 -64.0 -200.0 NULL DhxkBT 1969-12-31 16:00:00.199 1969-12-31 15:59:55.451 NULL false
@@ -125,11 +125,11 @@ POSTHOOK: Output: hdfs://### HDFS PATH ###
PREHOOK: query: select * from small_alltypesorc3b
PREHOOK: type: QUERY
PREHOOK: Input: default@small_alltypesorc3b
-PREHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
POSTHOOK: query: select * from small_alltypesorc3b
POSTHOOK: type: QUERY
POSTHOOK: Input: default@small_alltypesorc3b
-POSTHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
NULL NULL -1015272448 -1887561756 NULL NULL jTQ68531mP 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:45.854 false false
NULL NULL -609074876 -1887561756 NULL NULL EcM71 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:55.061 true false
NULL NULL -700300206 -1887561756 NULL NULL kdqQE010 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:58.384 false false
@@ -143,11 +143,11 @@ NULL NULL -971543377 -1645852809 NULL NULL uN803aW xH7445Rals48VOulSyR5F NULL 19
PREHOOK: query: select * from small_alltypesorc4b
PREHOOK: type: QUERY
PREHOOK: Input: default@small_alltypesorc4b
-PREHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
POSTHOOK: query: select * from small_alltypesorc4b
POSTHOOK: type: QUERY
POSTHOOK: Input: default@small_alltypesorc4b
-POSTHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
PREHOOK: query: create table small_alltypesorc_b stored as orc as select * from
(select * from (select * from small_alltypesorc1b) sq1
union all
@@ -202,20 +202,20 @@ PREHOOK: query: ANALYZE TABLE small_alltypesorc_b COMPUTE STATISTICS FOR COLUMNS
PREHOOK: type: QUERY
PREHOOK: Input: default@small_alltypesorc_b
PREHOOK: Output: default@small_alltypesorc_b
-PREHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
POSTHOOK: query: ANALYZE TABLE small_alltypesorc_b COMPUTE STATISTICS FOR COLUMNS
POSTHOOK: type: QUERY
POSTHOOK: Input: default@small_alltypesorc_b
POSTHOOK: Output: default@small_alltypesorc_b
-POSTHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
PREHOOK: query: select * from small_alltypesorc_b
PREHOOK: type: QUERY
PREHOOK: Input: default@small_alltypesorc_b
-PREHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
POSTHOOK: query: select * from small_alltypesorc_b
POSTHOOK: type: QUERY
POSTHOOK: Input: default@small_alltypesorc_b
-POSTHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
-64 -10462 626923679 NULL -64.0 -10462.0 821UdmGbkEf4j NULL 1969-12-31 16:00:02.496 1969-12-31 16:00:00.164 true NULL
-64 -15920 528534767 NULL -64.0 -15920.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:51.859 1969-12-31 16:00:14.468 true NULL
-64 -200 NULL -1809444706 -64.0 -200.0 NULL B87YVb3UASqg 1969-12-31 16:00:10.858 1969-12-31 15:59:55.451 NULL true
@@ -265,14 +265,14 @@ left outer join small_alltypesorc_b cd
on cd.cint = c.cint
PREHOOK: type: QUERY
PREHOOK: Input: default@small_alltypesorc_b
-PREHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
POSTHOOK: query: select * from small_alltypesorc_b c
left outer join small_alltypesorc_b cd
on cd.cint = c.cint
POSTHOOK: type: QUERY
POSTHOOK: Input: default@small_alltypesorc_b
-POSTHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
-64 -10462 626923679 NULL -64.0 -10462.0 821UdmGbkEf4j NULL 1969-12-31 16:00:02.496 1969-12-31 16:00:00.164 true NULL -64 -10462 626923679 NULL -64.0 -10462.0 821UdmGbkEf4j NULL 1969-12-31 16:00:02.496 1969-12-31 16:00:00.164 true NULL
-64 -10462 626923679 NULL -64.0 -10462.0 821UdmGbkEf4j NULL 1969-12-31 16:00:02.496 1969-12-31 16:00:00.164 true NULL -64 -3586 626923679 NULL -64.0 -3586.0 821UdmGbkEf4j NULL 1969-12-31 16:00:11.952 1969-12-31 15:59:51.131 true NULL
-64 -10462 626923679 NULL -64.0 -10462.0 821UdmGbkEf4j NULL 1969-12-31 16:00:02.496 1969-12-31 16:00:00.164 true NULL -64 -4018 626923679 NULL -64.0 -4018.0 821UdmGbkEf4j NULL 1969-12-31 15:59:58.959 1969-12-31 16:00:07.803 true NULL
@@ -346,14 +346,14 @@ left outer join small_alltypesorc_b hd
on hd.ctinyint = c.ctinyint
PREHOOK: type: QUERY
PREHOOK: Input: default@small_alltypesorc_b
-PREHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
POSTHOOK: query: select c.ctinyint
from small_alltypesorc_b c
left outer join small_alltypesorc_b hd
on hd.ctinyint = c.ctinyint
POSTHOOK: type: QUERY
POSTHOOK: Input: default@small_alltypesorc_b
-POSTHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
-64
-64
-64
@@ -792,7 +792,7 @@ left outer join small_alltypesorc_b hd
) t1
PREHOOK: type: QUERY
PREHOOK: Input: default@small_alltypesorc_b
-PREHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
POSTHOOK: query: select count(*) from (select c.ctinyint
from small_alltypesorc_b c
left outer join small_alltypesorc_b cd
@@ -802,5 +802,5 @@ left outer join small_alltypesorc_b hd
) t1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@small_alltypesorc_b
-POSTHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
890
diff --git ql/src/test/results/clientpositive/spark/vector_outer_join5.q.out ql/src/test/results/clientpositive/spark/vector_outer_join5.q.out
index baf7204..680ee42 100644
--- ql/src/test/results/clientpositive/spark/vector_outer_join5.q.out
+++ ql/src/test/results/clientpositive/spark/vector_outer_join5.q.out
@@ -28,12 +28,12 @@ PREHOOK: query: ANALYZE TABLE sorted_mod_4 COMPUTE STATISTICS FOR COLUMNS
PREHOOK: type: QUERY
PREHOOK: Input: default@sorted_mod_4
PREHOOK: Output: default@sorted_mod_4
-PREHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
POSTHOOK: query: ANALYZE TABLE sorted_mod_4 COMPUTE STATISTICS FOR COLUMNS
POSTHOOK: type: QUERY
POSTHOOK: Input: default@sorted_mod_4
POSTHOOK: Output: default@sorted_mod_4
-POSTHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
PREHOOK: query: create table small_table stored as orc as select ctinyint, cbigint from alltypesorc limit 100
PREHOOK: type: CREATETABLE_AS_SELECT
@@ -60,12 +60,12 @@ PREHOOK: query: ANALYZE TABLE small_table COMPUTE STATISTICS FOR COLUMNS
PREHOOK: type: QUERY
PREHOOK: Input: default@small_table
PREHOOK: Output: default@small_table
-PREHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
POSTHOOK: query: ANALYZE TABLE small_table COMPUTE STATISTICS FOR COLUMNS
POSTHOOK: type: QUERY
POSTHOOK: Input: default@small_table
POSTHOOK: Output: default@small_table
-POSTHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
PREHOOK: query: explain vectorization detail formatted
select count(*) from (select s.*, st.*
from sorted_mod_4 s
@@ -89,7 +89,7 @@ on s.ctinyint = st.ctinyint
PREHOOK: type: QUERY
PREHOOK: Input: default@small_table
PREHOOK: Input: default@sorted_mod_4
-PREHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
POSTHOOK: query: select count(*) from (select s.*, st.*
from sorted_mod_4 s
left outer join small_table st
@@ -98,7 +98,7 @@ on s.ctinyint = st.ctinyint
POSTHOOK: type: QUERY
POSTHOOK: Input: default@small_table
POSTHOOK: Input: default@sorted_mod_4
-POSTHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
6876
PREHOOK: query: explain vectorization detail formatted
select count(*) from (select s.ctinyint, s.cmodint, sm.cbigint
@@ -123,7 +123,7 @@ on s.ctinyint = sm.ctinyint and s.cmodint = 2
PREHOOK: type: QUERY
PREHOOK: Input: default@small_table
PREHOOK: Input: default@sorted_mod_4
-PREHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
POSTHOOK: query: select count(*) from (select s.ctinyint, s.cmodint, sm.cbigint
from sorted_mod_4 s
left outer join small_table sm
@@ -132,7 +132,7 @@ on s.ctinyint = sm.ctinyint and s.cmodint = 2
POSTHOOK: type: QUERY
POSTHOOK: Input: default@small_table
POSTHOOK: Input: default@sorted_mod_4
-POSTHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
6058
PREHOOK: query: explain vectorization detail formatted
select count(*) from (select s.ctinyint, s.cmodint, sm.cbigint
@@ -157,7 +157,7 @@ on s.ctinyint = sm.ctinyint and pmod(s.ctinyint, 4) = s.cmodint
PREHOOK: type: QUERY
PREHOOK: Input: default@small_table
PREHOOK: Input: default@sorted_mod_4
-PREHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
POSTHOOK: query: select count(*) from (select s.ctinyint, s.cmodint, sm.cbigint
from sorted_mod_4 s
left outer join small_table sm
@@ -166,7 +166,7 @@ on s.ctinyint = sm.ctinyint and pmod(s.ctinyint, 4) = s.cmodint
POSTHOOK: type: QUERY
POSTHOOK: Input: default@small_table
POSTHOOK: Input: default@sorted_mod_4
-POSTHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
6248
PREHOOK: query: explain vectorization detail formatted
select count(*) from (select s.ctinyint, s.cmodint, sm.cbigint
@@ -191,7 +191,7 @@ on s.ctinyint = sm.ctinyint and s.ctinyint < 100
PREHOOK: type: QUERY
PREHOOK: Input: default@small_table
PREHOOK: Input: default@sorted_mod_4
-PREHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
POSTHOOK: query: select count(*) from (select s.ctinyint, s.cmodint, sm.cbigint
from sorted_mod_4 s
left outer join small_table sm
@@ -200,7 +200,7 @@ on s.ctinyint = sm.ctinyint and s.ctinyint < 100
POSTHOOK: type: QUERY
POSTHOOK: Input: default@small_table
POSTHOOK: Input: default@sorted_mod_4
-POSTHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
6876
PREHOOK: query: explain vectorization detail formatted
select count(*) from (select s.*, sm.*, s2.*
@@ -231,7 +231,7 @@ left outer join sorted_mod_4 s2
PREHOOK: type: QUERY
PREHOOK: Input: default@small_table
PREHOOK: Input: default@sorted_mod_4
-PREHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
POSTHOOK: query: select count(*) from (select s.*, sm.*, s2.*
from sorted_mod_4 s
left outer join small_table sm
@@ -242,7 +242,7 @@ left outer join sorted_mod_4 s2
POSTHOOK: type: QUERY
POSTHOOK: Input: default@small_table
POSTHOOK: Input: default@sorted_mod_4
-POSTHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
3268334
PREHOOK: query: create table mod_8_mod_4 stored as orc
as select pmod(ctinyint, 8) as cmodtinyint, pmod(cint, 4) as cmodint from alltypesorc
@@ -272,12 +272,12 @@ PREHOOK: query: ANALYZE TABLE mod_8_mod_4 COMPUTE STATISTICS FOR COLUMNS
PREHOOK: type: QUERY
PREHOOK: Input: default@mod_8_mod_4
PREHOOK: Output: default@mod_8_mod_4
-PREHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
POSTHOOK: query: ANALYZE TABLE mod_8_mod_4 COMPUTE STATISTICS FOR COLUMNS
POSTHOOK: type: QUERY
POSTHOOK: Input: default@mod_8_mod_4
POSTHOOK: Output: default@mod_8_mod_4
-POSTHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
PREHOOK: query: create table small_table2 stored as orc
as select pmod(ctinyint, 16) as cmodtinyint, cbigint from alltypesorc limit 100
PREHOOK: type: CREATETABLE_AS_SELECT
@@ -304,12 +304,12 @@ PREHOOK: query: ANALYZE TABLE small_table2 COMPUTE STATISTICS FOR COLUMNS
PREHOOK: type: QUERY
PREHOOK: Input: default@small_table2
PREHOOK: Output: default@small_table2
-PREHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
POSTHOOK: query: ANALYZE TABLE small_table2 COMPUTE STATISTICS FOR COLUMNS
POSTHOOK: type: QUERY
POSTHOOK: Input: default@small_table2
POSTHOOK: Output: default@small_table2
-POSTHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
PREHOOK: query: explain vectorization detail formatted
select count(*) from (select s.*, st.*
from mod_8_mod_4 s
@@ -333,7 +333,7 @@ on s.cmodtinyint = st.cmodtinyint
PREHOOK: type: QUERY
PREHOOK: Input: default@mod_8_mod_4
PREHOOK: Input: default@small_table2
-PREHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
POSTHOOK: query: select count(*) from (select s.*, st.*
from mod_8_mod_4 s
left outer join small_table2 st
@@ -342,7 +342,7 @@ on s.cmodtinyint = st.cmodtinyint
POSTHOOK: type: QUERY
POSTHOOK: Input: default@mod_8_mod_4
POSTHOOK: Input: default@small_table2
-POSTHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
39112
PREHOOK: query: explain vectorization detail formatted
select count(*) from (select s.cmodtinyint, s.cmodint, sm.cbigint
@@ -367,7 +367,7 @@ on s.cmodtinyint = sm.cmodtinyint and s.cmodint = 2
PREHOOK: type: QUERY
PREHOOK: Input: default@mod_8_mod_4
PREHOOK: Input: default@small_table2
-PREHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
POSTHOOK: query: select count(*) from (select s.cmodtinyint, s.cmodint, sm.cbigint
from mod_8_mod_4 s
left outer join small_table2 sm
@@ -376,7 +376,7 @@ on s.cmodtinyint = sm.cmodtinyint and s.cmodint = 2
POSTHOOK: type: QUERY
POSTHOOK: Input: default@mod_8_mod_4
POSTHOOK: Input: default@small_table2
-POSTHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
11171
PREHOOK: query: explain vectorization detail formatted
select count(*) from (select s.cmodtinyint, s.cmodint, sm.cbigint
@@ -401,7 +401,7 @@ on s.cmodtinyint = sm.cmodtinyint and pmod(s.cmodtinyint, 4) = s.cmodint
PREHOOK: type: QUERY
PREHOOK: Input: default@mod_8_mod_4
PREHOOK: Input: default@small_table2
-PREHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
POSTHOOK: query: select count(*) from (select s.cmodtinyint, s.cmodint, sm.cbigint
from mod_8_mod_4 s
left outer join small_table2 sm
@@ -410,7 +410,7 @@ on s.cmodtinyint = sm.cmodtinyint and pmod(s.cmodtinyint, 4) = s.cmodint
POSTHOOK: type: QUERY
POSTHOOK: Input: default@mod_8_mod_4
POSTHOOK: Input: default@small_table2
-POSTHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
14371
PREHOOK: query: explain vectorization detail formatted
select count(*) from (select s.cmodtinyint, s.cmodint, sm.cbigint
@@ -435,7 +435,7 @@ on s.cmodtinyint = sm.cmodtinyint and s.cmodtinyint < 3
PREHOOK: type: QUERY
PREHOOK: Input: default@mod_8_mod_4
PREHOOK: Input: default@small_table2
-PREHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
POSTHOOK: query: select count(*) from (select s.cmodtinyint, s.cmodint, sm.cbigint
from mod_8_mod_4 s
left outer join small_table2 sm
@@ -444,7 +444,7 @@ on s.cmodtinyint = sm.cmodtinyint and s.cmodtinyint < 3
POSTHOOK: type: QUERY
POSTHOOK: Input: default@mod_8_mod_4
POSTHOOK: Input: default@small_table2
-POSTHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
17792
PREHOOK: query: explain vectorization detail formatted
select count(*) from (select s.*, sm.*, s2.*
@@ -475,7 +475,7 @@ left outer join mod_8_mod_4 s2
PREHOOK: type: QUERY
PREHOOK: Input: default@mod_8_mod_4
PREHOOK: Input: default@small_table2
-PREHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
POSTHOOK: query: select count(*) from (select s.*, sm.*, s2.*
from mod_8_mod_4 s
left outer join small_table2 sm
@@ -486,5 +486,5 @@ left outer join mod_8_mod_4 s2
POSTHOOK: type: QUERY
POSTHOOK: Input: default@mod_8_mod_4
POSTHOOK: Input: default@small_table2
-POSTHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
6524438
diff --git ql/src/test/results/clientpositive/spark/vector_string_concat.q.out ql/src/test/results/clientpositive/spark/vector_string_concat.q.out
index 30dbaf1..bb6a956 100644
--- ql/src/test/results/clientpositive/spark/vector_string_concat.q.out
+++ ql/src/test/results/clientpositive/spark/vector_string_concat.q.out
@@ -352,10 +352,11 @@ STAGE PLANS:
Statistics: Num rows: 2000 Data size: 918712 Basic stats: COMPLETE Column stats: NONE
Group By Operator
Group By Vectorization:
- className: VectorGroupByOperator
+ className: VectorGroupByHashStringKeyDuplicateReductionOperator
groupByMode: HASH
keyExpressions: col 20:string
- native: false
+ native: true
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
vectorProcessingMode: HASH
projectedOutputColumnNums: []
keys: _col0 (type: string)
@@ -379,7 +380,7 @@ STAGE PLANS:
inputFormatFeatureSupport: []
featureSupportInUse: []
inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
- allNative: false
+ allNative: true
usesVectorUDFAdaptor: false
vectorized: true
Reducer 2
diff --git ql/src/test/results/clientpositive/spark/vectorization_0.q.out ql/src/test/results/clientpositive/spark/vectorization_0.q.out
index c3201bf..170a956 100644
--- ql/src/test/results/clientpositive/spark/vectorization_0.q.out
+++ ql/src/test/results/clientpositive/spark/vectorization_0.q.out
@@ -53,6 +53,8 @@ STAGE PLANS:
className: VectorGroupByOperator
groupByMode: HASH
native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false
vectorProcessingMode: HASH
projectedOutputColumnNums: [0, 1, 2, 3]
mode: hash
@@ -232,6 +234,8 @@ STAGE PLANS:
className: VectorGroupByOperator
groupByMode: HASH
native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false
vectorProcessingMode: HASH
projectedOutputColumnNums: [0]
mode: hash
@@ -559,6 +563,8 @@ STAGE PLANS:
className: VectorGroupByOperator
groupByMode: HASH
native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false
vectorProcessingMode: HASH
projectedOutputColumnNums: [0, 1, 2, 3]
mode: hash
@@ -738,6 +744,8 @@ STAGE PLANS:
className: VectorGroupByOperator
groupByMode: HASH
native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false
vectorProcessingMode: HASH
projectedOutputColumnNums: [0]
mode: hash
@@ -1065,6 +1073,8 @@ STAGE PLANS:
className: VectorGroupByOperator
groupByMode: HASH
native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false
vectorProcessingMode: HASH
projectedOutputColumnNums: [0, 1, 2, 3]
mode: hash
@@ -1244,6 +1254,8 @@ STAGE PLANS:
className: VectorGroupByOperator
groupByMode: HASH
native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false
vectorProcessingMode: HASH
projectedOutputColumnNums: [0]
mode: hash
@@ -1617,6 +1629,8 @@ STAGE PLANS:
className: VectorGroupByOperator
groupByMode: HASH
native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false
vectorProcessingMode: HASH
projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6]
mode: hash
diff --git ql/src/test/results/clientpositive/spark/vectorization_1.q.out ql/src/test/results/clientpositive/spark/vectorization_1.q.out
index 71625e0..3fc7f3e 100644
--- ql/src/test/results/clientpositive/spark/vectorization_1.q.out
+++ ql/src/test/results/clientpositive/spark/vectorization_1.q.out
@@ -86,6 +86,8 @@ STAGE PLANS:
className: VectorGroupByOperator
groupByMode: HASH
native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false
vectorProcessingMode: HASH
projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
mode: hash
diff --git ql/src/test/results/clientpositive/spark/vectorization_12.q.out ql/src/test/results/clientpositive/spark/vectorization_12.q.out
index 24cfa4e..8e88ae3 100644
--- ql/src/test/results/clientpositive/spark/vectorization_12.q.out
+++ ql/src/test/results/clientpositive/spark/vectorization_12.q.out
@@ -110,6 +110,8 @@ STAGE PLANS:
groupByMode: HASH
keyExpressions: col 5:double, col 3:bigint, col 6:string, col 10:boolean
native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false
vectorProcessingMode: HASH
projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6]
keys: _col3 (type: double), _col0 (type: bigint), _col2 (type: string), _col1 (type: boolean)
diff --git ql/src/test/results/clientpositive/spark/vectorization_13.q.out ql/src/test/results/clientpositive/spark/vectorization_13.q.out
index c2a8006..3a0c3b7 100644
--- ql/src/test/results/clientpositive/spark/vectorization_13.q.out
+++ ql/src/test/results/clientpositive/spark/vectorization_13.q.out
@@ -112,6 +112,8 @@ STAGE PLANS:
groupByMode: HASH
keyExpressions: col 10:boolean, col 0:tinyint, col 8:timestamp, col 4:float, col 6:string
native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false
vectorProcessingMode: HASH
projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
keys: _col0 (type: boolean), _col1 (type: tinyint), _col2 (type: timestamp), _col3 (type: float), _col4 (type: string)
@@ -464,6 +466,8 @@ STAGE PLANS:
groupByMode: HASH
keyExpressions: col 10:boolean, col 0:tinyint, col 8:timestamp, col 4:float, col 6:string
native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false
vectorProcessingMode: HASH
projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
keys: _col0 (type: boolean), _col1 (type: tinyint), _col2 (type: timestamp), _col3 (type: float), _col4 (type: string)
diff --git ql/src/test/results/clientpositive/spark/vectorization_14.q.out ql/src/test/results/clientpositive/spark/vectorization_14.q.out
index 95bf29b..aa679ef 100644
--- ql/src/test/results/clientpositive/spark/vectorization_14.q.out
+++ ql/src/test/results/clientpositive/spark/vectorization_14.q.out
@@ -112,6 +112,8 @@ STAGE PLANS:
groupByMode: HASH
keyExpressions: col 6:string, col 4:float, col 5:double, col 8:timestamp, col 10:boolean
native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false
vectorProcessingMode: HASH
projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6]
keys: _col2 (type: string), _col1 (type: float), _col4 (type: double), _col0 (type: timestamp), _col3 (type: boolean)
diff --git ql/src/test/results/clientpositive/spark/vectorization_15.q.out ql/src/test/results/clientpositive/spark/vectorization_15.q.out
index d0b03b3..4c11185 100644
--- ql/src/test/results/clientpositive/spark/vectorization_15.q.out
+++ ql/src/test/results/clientpositive/spark/vectorization_15.q.out
@@ -108,6 +108,8 @@ STAGE PLANS:
groupByMode: HASH
keyExpressions: col 4:float, col 10:boolean, col 5:double, col 6:string, col 0:tinyint, col 2:int, col 8:timestamp
native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false
vectorProcessingMode: HASH
projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
keys: _col0 (type: float), _col1 (type: boolean), _col2 (type: double), _col3 (type: string), _col4 (type: tinyint), _col5 (type: int), _col6 (type: timestamp)
diff --git ql/src/test/results/clientpositive/spark/vectorization_16.q.out ql/src/test/results/clientpositive/spark/vectorization_16.q.out
index 8798ebe..baa24a7 100644
--- ql/src/test/results/clientpositive/spark/vectorization_16.q.out
+++ ql/src/test/results/clientpositive/spark/vectorization_16.q.out
@@ -85,6 +85,8 @@ STAGE PLANS:
groupByMode: HASH
keyExpressions: col 6:string, col 5:double, col 8:timestamp
native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false
vectorProcessingMode: HASH
projectedOutputColumnNums: [0, 1, 2, 3]
keys: _col0 (type: string), _col1 (type: double), _col2 (type: timestamp)
diff --git ql/src/test/results/clientpositive/spark/vectorization_2.q.out ql/src/test/results/clientpositive/spark/vectorization_2.q.out
index 99afc2b..7c943b6 100644
--- ql/src/test/results/clientpositive/spark/vectorization_2.q.out
+++ ql/src/test/results/clientpositive/spark/vectorization_2.q.out
@@ -90,6 +90,8 @@ STAGE PLANS:
className: VectorGroupByOperator
groupByMode: HASH
native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false
vectorProcessingMode: HASH
projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
mode: hash
diff --git ql/src/test/results/clientpositive/spark/vectorization_3.q.out ql/src/test/results/clientpositive/spark/vectorization_3.q.out
index 2bccf64..78c9092 100644
--- ql/src/test/results/clientpositive/spark/vectorization_3.q.out
+++ ql/src/test/results/clientpositive/spark/vectorization_3.q.out
@@ -95,6 +95,8 @@ STAGE PLANS:
className: VectorGroupByOperator
groupByMode: HASH
native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false
vectorProcessingMode: HASH
projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13]
mode: hash
diff --git ql/src/test/results/clientpositive/spark/vectorization_4.q.out ql/src/test/results/clientpositive/spark/vectorization_4.q.out
index 922eb90..9c1c8e7 100644
--- ql/src/test/results/clientpositive/spark/vectorization_4.q.out
+++ ql/src/test/results/clientpositive/spark/vectorization_4.q.out
@@ -90,6 +90,8 @@ STAGE PLANS:
className: VectorGroupByOperator
groupByMode: HASH
native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false
vectorProcessingMode: HASH
projectedOutputColumnNums: [0, 1, 2, 3, 4]
mode: hash
diff --git ql/src/test/results/clientpositive/spark/vectorization_5.q.out ql/src/test/results/clientpositive/spark/vectorization_5.q.out
index 4cf4548..9d80010 100644
--- ql/src/test/results/clientpositive/spark/vectorization_5.q.out
+++ ql/src/test/results/clientpositive/spark/vectorization_5.q.out
@@ -83,6 +83,8 @@ STAGE PLANS:
className: VectorGroupByOperator
groupByMode: HASH
native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false
vectorProcessingMode: HASH
projectedOutputColumnNums: [0, 1, 2, 3, 4]
mode: hash
diff --git ql/src/test/results/clientpositive/spark/vectorization_9.q.out ql/src/test/results/clientpositive/spark/vectorization_9.q.out
index 8798ebe..baa24a7 100644
--- ql/src/test/results/clientpositive/spark/vectorization_9.q.out
+++ ql/src/test/results/clientpositive/spark/vectorization_9.q.out
@@ -85,6 +85,8 @@ STAGE PLANS:
groupByMode: HASH
keyExpressions: col 6:string, col 5:double, col 8:timestamp
native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false
vectorProcessingMode: HASH
projectedOutputColumnNums: [0, 1, 2, 3]
keys: _col0 (type: string), _col1 (type: double), _col2 (type: timestamp)
diff --git ql/src/test/results/clientpositive/spark/vectorization_nested_udf.q.out ql/src/test/results/clientpositive/spark/vectorization_nested_udf.q.out
index c46fc03..ffaef58 100644
--- ql/src/test/results/clientpositive/spark/vectorization_nested_udf.q.out
+++ ql/src/test/results/clientpositive/spark/vectorization_nested_udf.q.out
@@ -43,6 +43,8 @@ STAGE PLANS:
className: VectorGroupByOperator
groupByMode: HASH
native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false
vectorProcessingMode: HASH
projectedOutputColumnNums: [0]
mode: hash
diff --git ql/src/test/results/clientpositive/spark/vectorization_parquet_projection.q.out ql/src/test/results/clientpositive/spark/vectorization_parquet_projection.q.out
index d58a989..794e051 100644
--- ql/src/test/results/clientpositive/spark/vectorization_parquet_projection.q.out
+++ ql/src/test/results/clientpositive/spark/vectorization_parquet_projection.q.out
@@ -321,7 +321,7 @@ STAGE PLANS:
inputFormatFeatureSupport: []
featureSupportInUse: []
inputFileFormats: org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat
- allNative: false
+ allNative: true
usesVectorUDFAdaptor: false
vectorized: true
Reducer 2
diff --git ql/src/test/results/clientpositive/spark/vectorization_short_regress.q.out ql/src/test/results/clientpositive/spark/vectorization_short_regress.q.out
index 6215906..9d709b6 100644
--- ql/src/test/results/clientpositive/spark/vectorization_short_regress.q.out
+++ ql/src/test/results/clientpositive/spark/vectorization_short_regress.q.out
@@ -117,6 +117,8 @@ STAGE PLANS:
className: VectorGroupByOperator
groupByMode: HASH
native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false
vectorProcessingMode: HASH
projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
mode: hash
@@ -377,6 +379,8 @@ STAGE PLANS:
className: VectorGroupByOperator
groupByMode: HASH
native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false
vectorProcessingMode: HASH
projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14]
mode: hash
@@ -629,6 +633,8 @@ STAGE PLANS:
className: VectorGroupByOperator
groupByMode: HASH
native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false
vectorProcessingMode: HASH
projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13]
mode: hash
@@ -860,6 +866,8 @@ STAGE PLANS:
className: VectorGroupByOperator
groupByMode: HASH
native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false
vectorProcessingMode: HASH
projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
mode: hash
@@ -2185,6 +2193,8 @@ STAGE PLANS:
groupByMode: HASH
keyExpressions: col 1:smallint
native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, Single Key Column IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: Single COUNT aggregation or Duplicate Reduction IS false
vectorProcessingMode: HASH
projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7]
keys: _col0 (type: smallint)
@@ -2460,6 +2470,8 @@ STAGE PLANS:
groupByMode: HASH
keyExpressions: col 5:double
native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, Single Key Column IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: Single COUNT aggregation or Duplicate Reduction IS false
vectorProcessingMode: HASH
projectedOutputColumnNums: [0, 1, 2, 3, 4]
keys: _col0 (type: double)
@@ -2779,6 +2791,8 @@ STAGE PLANS:
groupByMode: HASH
keyExpressions: col 8:timestamp, col 6:string
native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false
vectorProcessingMode: HASH
projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17]
keys: _col0 (type: timestamp), _col1 (type: string)
@@ -3179,6 +3193,8 @@ STAGE PLANS:
groupByMode: HASH
keyExpressions: col 10:boolean
native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, Single Key Column IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: Single COUNT aggregation or Duplicate Reduction IS false
vectorProcessingMode: HASH
projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17]
keys: _col0 (type: boolean)
@@ -3415,6 +3431,8 @@ STAGE PLANS:
className: VectorGroupByOperator
groupByMode: HASH
native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false
vectorProcessingMode: HASH
projectedOutputColumnNums: [0]
mode: hash
@@ -3528,6 +3546,8 @@ STAGE PLANS:
className: VectorGroupByOperator
groupByMode: HASH
native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false
vectorProcessingMode: HASH
projectedOutputColumnNums: [0]
mode: hash
@@ -3713,6 +3733,8 @@ STAGE PLANS:
className: VectorGroupByOperator
groupByMode: HASH
native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false
vectorProcessingMode: HASH
projectedOutputColumnNums: [0]
mode: hash
@@ -3826,6 +3848,8 @@ STAGE PLANS:
className: VectorGroupByOperator
groupByMode: HASH
native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false
vectorProcessingMode: HASH
projectedOutputColumnNums: [0]
mode: hash
@@ -3939,6 +3963,8 @@ STAGE PLANS:
className: VectorGroupByOperator
groupByMode: HASH
native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false
vectorProcessingMode: HASH
projectedOutputColumnNums: [0]
mode: hash
@@ -4052,6 +4078,8 @@ STAGE PLANS:
className: VectorGroupByOperator
groupByMode: HASH
native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false
vectorProcessingMode: HASH
projectedOutputColumnNums: [0]
mode: hash
@@ -4165,6 +4193,8 @@ STAGE PLANS:
className: VectorGroupByOperator
groupByMode: HASH
native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false
vectorProcessingMode: HASH
projectedOutputColumnNums: [0]
mode: hash
@@ -4278,6 +4308,8 @@ STAGE PLANS:
className: VectorGroupByOperator
groupByMode: HASH
native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false
vectorProcessingMode: HASH
projectedOutputColumnNums: [0]
mode: hash
diff --git ql/src/test/results/clientpositive/spark/vectorized_case.q.out ql/src/test/results/clientpositive/spark/vectorized_case.q.out
index 58e295d..735eded 100644
--- ql/src/test/results/clientpositive/spark/vectorized_case.q.out
+++ ql/src/test/results/clientpositive/spark/vectorized_case.q.out
@@ -303,6 +303,8 @@ STAGE PLANS:
className: VectorGroupByOperator
groupByMode: HASH
native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false
vectorProcessingMode: HASH
projectedOutputColumnNums: [0, 1]
mode: hash
@@ -445,6 +447,8 @@ STAGE PLANS:
className: VectorGroupByOperator
groupByMode: HASH
native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false
vectorProcessingMode: HASH
projectedOutputColumnNums: [0, 1]
mode: hash
diff --git ql/src/test/results/clientpositive/spark/vectorized_mapjoin.q.out ql/src/test/results/clientpositive/spark/vectorized_mapjoin.q.out
index 5104c80..f0e0e88 100644
--- ql/src/test/results/clientpositive/spark/vectorized_mapjoin.q.out
+++ ql/src/test/results/clientpositive/spark/vectorized_mapjoin.q.out
@@ -120,6 +120,8 @@ STAGE PLANS:
className: VectorGroupByOperator
groupByMode: HASH
native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false
vectorProcessingMode: HASH
projectedOutputColumnNums: [0, 1, 2, 3, 4]
mode: hash
diff --git ql/src/test/results/clientpositive/spark/vectorized_ptf.q.out ql/src/test/results/clientpositive/spark/vectorized_ptf.q.out
index edc8f74..59bd302 100644
--- ql/src/test/results/clientpositive/spark/vectorized_ptf.q.out
+++ ql/src/test/results/clientpositive/spark/vectorized_ptf.q.out
@@ -3494,6 +3494,8 @@ STAGE PLANS:
groupByMode: HASH
keyExpressions: col 2:string, col 3:string
native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false
vectorProcessingMode: HASH
projectedOutputColumnNums: [0]
keys: p_mfgr (type: string), p_brand (type: string)
diff --git ql/src/test/results/clientpositive/spark/vectorized_timestamp_funcs.q.out ql/src/test/results/clientpositive/spark/vectorized_timestamp_funcs.q.out
index 68b89a7..7366d05 100644
--- ql/src/test/results/clientpositive/spark/vectorized_timestamp_funcs.q.out
+++ ql/src/test/results/clientpositive/spark/vectorized_timestamp_funcs.q.out
@@ -797,6 +797,8 @@ STAGE PLANS:
className: VectorGroupByOperator
groupByMode: HASH
native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false
vectorProcessingMode: HASH
projectedOutputColumnNums: [0, 1, 2, 3]
mode: hash
@@ -922,6 +924,8 @@ STAGE PLANS:
className: VectorGroupByOperator
groupByMode: HASH
native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false
vectorProcessingMode: HASH
projectedOutputColumnNums: [0]
mode: hash
@@ -1065,6 +1069,8 @@ STAGE PLANS:
className: VectorGroupByOperator
groupByMode: HASH
native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false
vectorProcessingMode: HASH
projectedOutputColumnNums: [0, 1, 2, 3]
mode: hash
diff --git ql/src/test/results/clientpositive/vector_aggregate_9.q.out ql/src/test/results/clientpositive/vector_aggregate_9.q.out
index 0f7fcc1..468bbb1 100644
--- ql/src/test/results/clientpositive/vector_aggregate_9.q.out
+++ ql/src/test/results/clientpositive/vector_aggregate_9.q.out
@@ -140,6 +140,8 @@ STAGE PLANS:
className: VectorGroupByOperator
groupByMode: HASH
native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false
vectorProcessingMode: HASH
projectedOutputColumnNums: [0, 1, 2, 3]
mode: hash
@@ -246,6 +248,8 @@ STAGE PLANS:
className: VectorGroupByOperator
groupByMode: HASH
native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false
vectorProcessingMode: HASH
projectedOutputColumnNums: [0, 1, 2, 3]
mode: hash
@@ -352,6 +356,8 @@ STAGE PLANS:
className: VectorGroupByOperator
groupByMode: HASH
native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false
vectorProcessingMode: HASH
projectedOutputColumnNums: [0, 1, 2, 3]
mode: hash
diff --git ql/src/test/results/clientpositive/vector_aggregate_without_gby.q.out ql/src/test/results/clientpositive/vector_aggregate_without_gby.q.out
index 4d2b0dc..18ec1fd 100644
--- ql/src/test/results/clientpositive/vector_aggregate_without_gby.q.out
+++ ql/src/test/results/clientpositive/vector_aggregate_without_gby.q.out
@@ -79,6 +79,8 @@ STAGE PLANS:
className: VectorGroupByOperator
groupByMode: HASH
native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false
vectorProcessingMode: HASH
projectedOutputColumnNums: [0, 1]
mode: hash
diff --git ql/src/test/results/clientpositive/vector_binary_join_groupby.q.out ql/src/test/results/clientpositive/vector_binary_join_groupby.q.out
index 3f9e90b..263e0e1 100644
--- ql/src/test/results/clientpositive/vector_binary_join_groupby.q.out
+++ ql/src/test/results/clientpositive/vector_binary_join_groupby.q.out
@@ -193,6 +193,8 @@ STAGE PLANS:
className: VectorGroupByOperator
groupByMode: HASH
native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false
vectorProcessingMode: HASH
projectedOutputColumnNums: [0]
mode: hash
@@ -343,6 +345,8 @@ STAGE PLANS:
groupByMode: HASH
keyExpressions: col 10:binary
native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false
vectorProcessingMode: HASH
projectedOutputColumnNums: [0]
keys: bin (type: binary)
diff --git ql/src/test/results/clientpositive/vector_cast_constant.q.out ql/src/test/results/clientpositive/vector_cast_constant.q.out
index 3d3d761..b6bdb5a 100644
--- ql/src/test/results/clientpositive/vector_cast_constant.q.out
+++ ql/src/test/results/clientpositive/vector_cast_constant.q.out
@@ -143,6 +143,8 @@ STAGE PLANS:
groupByMode: HASH
keyExpressions: col 2:int
native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Single Key Column IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single COUNT aggregation or Duplicate Reduction IS false
vectorProcessingMode: HASH
projectedOutputColumnNums: [0, 1, 2, 3, 4, 5]
keys: _col0 (type: int)
diff --git ql/src/test/results/clientpositive/vector_char_2.q.out ql/src/test/results/clientpositive/vector_char_2.q.out
index b38cbe7..5faf92e 100644
--- ql/src/test/results/clientpositive/vector_char_2.q.out
+++ ql/src/test/results/clientpositive/vector_char_2.q.out
@@ -104,6 +104,8 @@ STAGE PLANS:
groupByMode: HASH
keyExpressions: col 1:char(20)
native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Single Key Column IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single COUNT aggregation or Duplicate Reduction IS false
vectorProcessingMode: HASH
projectedOutputColumnNums: [0, 1]
keys: _col0 (type: char(20))
@@ -292,6 +294,8 @@ STAGE PLANS:
groupByMode: HASH
keyExpressions: col 1:char(20)
native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Single Key Column IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single COUNT aggregation or Duplicate Reduction IS false
vectorProcessingMode: HASH
projectedOutputColumnNums: [0, 1]
keys: _col0 (type: char(20))
diff --git ql/src/test/results/clientpositive/vector_coalesce_2.q.out ql/src/test/results/clientpositive/vector_coalesce_2.q.out
index 48d38c3..ad2a824 100644
--- ql/src/test/results/clientpositive/vector_coalesce_2.q.out
+++ ql/src/test/results/clientpositive/vector_coalesce_2.q.out
@@ -72,6 +72,8 @@ STAGE PLANS:
groupByMode: HASH
keyExpressions: col 1:string
native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Single Key Column IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single COUNT aggregation or Duplicate Reduction IS false
vectorProcessingMode: HASH
projectedOutputColumnNums: [0]
keys: _col0 (type: string)
@@ -267,6 +269,8 @@ STAGE PLANS:
groupByMode: HASH
keyExpressions: col 1:string
native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Single Key Column IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single COUNT aggregation or Duplicate Reduction IS false
vectorProcessingMode: HASH
projectedOutputColumnNums: [0]
keys: _col0 (type: string)
diff --git ql/src/test/results/clientpositive/vector_count_simple.q.out ql/src/test/results/clientpositive/vector_count_simple.q.out
new file mode 100644
index 0000000..0281951
--- /dev/null
+++ ql/src/test/results/clientpositive/vector_count_simple.q.out
@@ -0,0 +1,24211 @@
+PREHOOK: query: create table abcd_txt (a int, b int, c int, d int)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@abcd_txt
+POSTHOOK: query: create table abcd_txt (a int, b int, c int, d int)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@abcd_txt
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/in4.txt' INTO TABLE abcd_txt
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@abcd_txt
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/in4.txt' INTO TABLE abcd_txt
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@abcd_txt
+PREHOOK: query: create table abcd stored as orc as select * from abcd_txt
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: default@abcd_txt
+PREHOOK: Output: database:default
+PREHOOK: Output: default@abcd
+POSTHOOK: query: create table abcd stored as orc as select * from abcd_txt
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: default@abcd_txt
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@abcd
+POSTHOOK: Lineage: abcd.a SIMPLE [(abcd_txt)abcd_txt.FieldSchema(name:a, type:int, comment:null), ]
+POSTHOOK: Lineage: abcd.b SIMPLE [(abcd_txt)abcd_txt.FieldSchema(name:b, type:int, comment:null), ]
+POSTHOOK: Lineage: abcd.c SIMPLE [(abcd_txt)abcd_txt.FieldSchema(name:c, type:int, comment:null), ]
+POSTHOOK: Lineage: abcd.d SIMPLE [(abcd_txt)abcd_txt.FieldSchema(name:d, type:int, comment:null), ]
+PREHOOK: query: select * from abcd
+PREHOOK: type: QUERY
+PREHOOK: Input: default@abcd
+#### A masked pattern was here ####
+POSTHOOK: query: select * from abcd
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@abcd
+#### A masked pattern was here ####
+10 100 45 4
+10 100 NULL 5
+10 1000 50 1
+100 100 10 3
+12 100 75 7
+12 NULL 80 2
+NULL 35 23 6
+PREHOOK: query: explain vectorization expression
+select a, count(a) from abcd group by a
+PREHOOK: type: QUERY
+POSTHOOK: query: explain vectorization expression
+select a, count(a) from abcd group by a
+POSTHOOK: type: QUERY
+PLAN VECTORIZATION:
+ enabled: true
+ enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Map Reduce
+ Map Operator Tree:
+ TableScan
+ alias: abcd
+ Statistics: Num rows: 7 Data size: 100 Basic stats: COMPLETE Column stats: NONE
+ TableScan Vectorization:
+ native: true
+ Select Operator
+ expressions: a (type: int)
+ outputColumnNames: a
+ Select Vectorization:
+ className: VectorSelectOperator
+ native: true
+ projectedOutputColumnNums: [0]
+ Statistics: Num rows: 7 Data size: 100 Basic stats: COMPLETE Column stats: NONE
+ Group By Operator
+ aggregations: count(a)
+ Group By Vectorization:
+ aggregators: VectorUDAFCount(col 0:int) -> bigint
+ className: VectorGroupByOperator
+ groupByMode: HASH
+ keyExpressions: col 0:int
+ native: false
+ nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+ nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false
+ vectorProcessingMode: HASH
+ projectedOutputColumnNums: [0]
+ keys: a (type: int)
+ mode: hash
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 7 Data size: 100 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: int)
+ sort order: +
+ Map-reduce partition columns: _col0 (type: int)
+ Reduce Sink Vectorization:
+ className: VectorReduceSinkOperator
+ native: false
+ nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
+ nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false
+ Statistics: Num rows: 7 Data size: 100 Basic stats: COMPLETE Column stats: NONE
+ value expressions: _col1 (type: bigint)
+ Execution mode: vectorized
+ Map Vectorization:
+ enabled: true
+ enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [] + featureSupportInUse: [] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Vectorization: + enabled: false + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true + enableConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false + Reduce Operator Tree: + Group By Operator + aggregations: count(VALUE._col0) + keys: KEY._col0 (type: int) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 3 Data size: 42 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 3 Data size: 42 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select a, count(a) from abcd group by a +PREHOOK: type: QUERY +PREHOOK: Input: default@abcd +#### A masked pattern was here #### +POSTHOOK: query: select a, count(a) from abcd group by a +POSTHOOK: type: QUERY +POSTHOOK: Input: default@abcd +#### A masked pattern was here #### +10 3 +100 1 +12 2 +NULL 0 +PREHOOK: query: explain vectorization expression +select a, count(b) from abcd group by a +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization expression +select a, count(b) from abcd group by a +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: abcd + Statistics: Num rows: 7 Data size: 100 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Select Operator + expressions: a (type: int), b (type: int) + outputColumnNames: a, b + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1] + Statistics: Num rows: 7 Data size: 100 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: count(b) + Group By Vectorization: + aggregators: VectorUDAFCount(col 1:int) -> bigint + className: VectorGroupByOperator + groupByMode: HASH + keyExpressions: col 0:int + native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false + vectorProcessingMode: HASH + projectedOutputColumnNums: [0] + keys: a (type: int) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 7 Data size: 100 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkOperator + native: false + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + nativeConditionsNotMet: hive.execution.engine mr IN 
[tez, spark] IS false + Statistics: Num rows: 7 Data size: 100 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: bigint) + Execution mode: vectorized + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [] + featureSupportInUse: [] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Vectorization: + enabled: false + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true + enableConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false + Reduce Operator Tree: + Group By Operator + aggregations: count(VALUE._col0) + keys: KEY._col0 (type: int) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 3 Data size: 42 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 3 Data size: 42 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select a, count(b) from abcd group by a +PREHOOK: type: QUERY +PREHOOK: Input: default@abcd +#### A masked pattern was here #### +POSTHOOK: query: select a, count(b) from abcd group by a +POSTHOOK: type: QUERY +POSTHOOK: Input: default@abcd +#### A masked pattern was here #### +10 3 +100 1 +12 1 +NULL 1 +PREHOOK: query: explain vectorization expression +select a, count(*) from abcd group by a +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization expression +select a, count(*) from abcd group by a +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: abcd + Statistics: Num rows: 7 Data size: 100 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Select Operator + expressions: a (type: int) + outputColumnNames: a + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0] + Statistics: Num rows: 7 Data size: 100 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: count() + Group By Vectorization: + aggregators: VectorUDAFCountStar(*) -> bigint + className: VectorGroupByOperator + groupByMode: HASH + keyExpressions: col 0:int + native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false + vectorProcessingMode: HASH + projectedOutputColumnNums: [0] + keys: a (type: int) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 7 Data size: 100 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkOperator + native: false + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS 
true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false + Statistics: Num rows: 7 Data size: 100 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: bigint) + Execution mode: vectorized + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [] + featureSupportInUse: [] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Vectorization: + enabled: false + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true + enableConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false + Reduce Operator Tree: + Group By Operator + aggregations: count(VALUE._col0) + keys: KEY._col0 (type: int) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 3 Data size: 42 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 3 Data size: 42 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select a, count(*) from abcd group by a +PREHOOK: type: QUERY +PREHOOK: Input: default@abcd +#### A masked pattern was here #### +POSTHOOK: query: select a, count(*) from abcd group by a +POSTHOOK: type: QUERY +POSTHOOK: Input: default@abcd +#### A masked pattern was here #### +10 3 +100 1 +12 2 +NULL 1 +PREHOOK: query: CREATE TABLE over10k(t tinyint, + si smallint, + i int, + b bigint, + f float, + d double, + bo boolean, + s string, + ts timestamp, + `dec` decimal(4,2), + bin binary) +ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' +STORED AS TEXTFILE +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@over10k +POSTHOOK: query: CREATE TABLE over10k(t tinyint, + si smallint, + i int, + b bigint, + f float, + d double, + bo boolean, + s string, + ts timestamp, + `dec` decimal(4,2), + bin binary) +ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' +STORED AS TEXTFILE +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@over10k +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/over10k' OVERWRITE INTO TABLE over10k +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@over10k +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/over10k' OVERWRITE INTO TABLE over10k +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@over10k +PREHOOK: query: explain vectorization expression +select s, count(s) from over10k group by s +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization expression +select s, count(s) from over10k group by s +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: over10k + Statistics: Num rows: 1 Data size: 10175440 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + 
native: true + Select Operator + expressions: s (type: string) + outputColumnNames: s + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [7] + Statistics: Num rows: 1 Data size: 10175440 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: count(s) + Group By Vectorization: + aggregators: VectorUDAFCount(col 7:string) -> bigint + className: VectorGroupByOperator + groupByMode: HASH + keyExpressions: col 7:string + native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false + vectorProcessingMode: HASH + projectedOutputColumnNums: [0] + keys: s (type: string) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 10175440 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Reduce Sink Vectorization: + className: VectorReduceSinkOperator + native: false + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false + Statistics: Num rows: 1 Data size: 10175440 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: bigint) + Execution mode: vectorized + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Vectorization: + enabled: false + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true + enableConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false + Reduce Operator Tree: + Group By Operator + aggregations: count(VALUE._col0) + keys: KEY._col0 (type: string) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 10175440 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 10175440 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select s, count(s) from over10k group by s +PREHOOK: type: QUERY +PREHOOK: Input: default@over10k +#### A masked pattern was here #### +POSTHOOK: query: select s, count(s) from over10k group by s +POSTHOOK: type: QUERY +POSTHOOK: Input: default@over10k +#### A masked pattern was here #### +alice allen 8 +alice brown 14 +alice carson 10 +alice davidson 18 +alice ellison 15 +alice falkner 17 +alice garcia 13 +alice hernandez 18 +alice ichabod 22 +alice johnson 12 +alice king 16 +alice laertes 16 +alice miller 16 +alice nixon 18 +alice ovid 17 +alice polk 14 +alice quirinius 15 +alice robinson 17 +alice steinbeck 16 +alice thompson 9 +alice underhill 14 +alice van buren 9 
+alice white 10 +alice xylophone 22 +alice young 11 +alice zipper 12 +bob allen 10 +bob brown 13 +bob carson 23 +bob davidson 13 +bob ellison 14 +bob falkner 17 +bob garcia 15 +bob hernandez 13 +bob ichabod 17 +bob johnson 9 +bob king 18 +bob laertes 17 +bob miller 12 +bob nixon 13 +bob ovid 28 +bob polk 10 +bob quirinius 17 +bob robinson 16 +bob steinbeck 11 +bob thompson 12 +bob underhill 14 +bob van buren 14 +bob white 19 +bob xylophone 21 +bob young 17 +bob zipper 11 +calvin allen 11 +calvin brown 13 +calvin carson 17 +calvin davidson 14 +calvin ellison 14 +calvin falkner 17 +calvin garcia 16 +calvin hernandez 17 +calvin ichabod 13 +calvin johnson 21 +calvin king 17 +calvin laertes 13 +calvin miller 18 +calvin nixon 17 +calvin ovid 16 +calvin polk 15 +calvin quirinius 16 +calvin robinson 13 +calvin steinbeck 15 +calvin thompson 16 +calvin underhill 9 +calvin van buren 15 +calvin white 18 +calvin xylophone 18 +calvin young 16 +calvin zipper 18 +david allen 21 +david brown 15 +david carson 11 +david davidson 13 +david ellison 16 +david falkner 13 +david garcia 15 +david hernandez 8 +david ichabod 7 +david johnson 14 +david king 15 +david laertes 20 +david miller 8 +david nixon 14 +david ovid 16 +david polk 11 +david quirinius 14 +david robinson 15 +david steinbeck 13 +david thompson 12 +david underhill 18 +david van buren 15 +david white 11 +david xylophone 14 +david young 19 +david zipper 17 +ethan allen 15 +ethan brown 17 +ethan carson 22 +ethan davidson 14 +ethan ellison 19 +ethan falkner 14 +ethan garcia 19 +ethan hernandez 13 +ethan ichabod 14 +ethan johnson 11 +ethan king 20 +ethan laertes 20 +ethan miller 9 +ethan nixon 23 +ethan ovid 16 +ethan polk 16 +ethan quirinius 16 +ethan robinson 18 +ethan steinbeck 7 +ethan thompson 24 +ethan underhill 17 +ethan van buren 13 +ethan white 12 +ethan xylophone 17 +ethan young 15 +ethan zipper 14 +fred allen 12 +fred brown 15 +fred carson 9 +fred davidson 13 +fred ellison 19 +fred falkner 12 +fred garcia 5 +fred hernandez 14 +fred ichabod 13 +fred johnson 15 +fred king 14 +fred laertes 12 +fred miller 15 +fred nixon 19 +fred ovid 13 +fred polk 21 +fred quirinius 18 +fred robinson 17 +fred steinbeck 11 +fred thompson 11 +fred underhill 13 +fred van buren 17 +fred white 15 +fred xylophone 11 +fred young 14 +fred zipper 13 +gabriella allen 7 +gabriella brown 19 +gabriella carson 8 +gabriella davidson 12 +gabriella ellison 20 +gabriella falkner 16 +gabriella garcia 15 +gabriella hernandez 19 +gabriella ichabod 19 +gabriella johnson 8 +gabriella king 12 +gabriella laertes 8 +gabriella miller 6 +gabriella nixon 22 +gabriella ovid 6 +gabriella polk 13 +gabriella quirinius 17 +gabriella robinson 16 +gabriella steinbeck 18 +gabriella thompson 13 +gabriella underhill 22 +gabriella van buren 18 +gabriella white 16 +gabriella xylophone 12 +gabriella young 8 +gabriella zipper 13 +holly allen 12 +holly brown 9 +holly carson 12 +holly davidson 9 +holly ellison 10 +holly falkner 24 +holly garcia 15 +holly hernandez 18 +holly ichabod 12 +holly johnson 14 +holly king 12 +holly laertes 9 +holly miller 14 +holly nixon 12 +holly ovid 12 +holly polk 16 +holly quirinius 16 +holly robinson 13 +holly steinbeck 11 +holly thompson 15 +holly underhill 27 +holly van buren 14 +holly white 22 +holly xylophone 18 +holly young 9 +holly zipper 11 +irene allen 9 +irene brown 10 +irene carson 18 +irene davidson 10 +irene ellison 16 +irene falkner 16 +irene garcia 15 +irene hernandez 12 +irene ichabod 14 +irene johnson 18 +irene king 18 +irene laertes 19 +irene miller 16 +irene 
nixon 17 +irene ovid 14 +irene polk 21 +irene quirinius 23 +irene robinson 13 +irene steinbeck 7 +irene thompson 16 +irene underhill 10 +irene van buren 19 +irene white 10 +irene xylophone 11 +irene young 12 +irene zipper 7 +jessica allen 12 +jessica brown 16 +jessica carson 12 +jessica davidson 24 +jessica ellison 14 +jessica falkner 10 +jessica garcia 16 +jessica hernandez 14 +jessica ichabod 15 +jessica johnson 16 +jessica king 15 +jessica laertes 10 +jessica miller 18 +jessica nixon 18 +jessica ovid 12 +jessica polk 12 +jessica quirinius 16 +jessica robinson 17 +jessica steinbeck 13 +jessica thompson 19 +jessica underhill 13 +jessica van buren 9 +jessica white 24 +jessica xylophone 16 +jessica young 13 +jessica zipper 12 +katie allen 15 +katie brown 16 +katie carson 11 +katie davidson 18 +katie ellison 10 +katie falkner 15 +katie garcia 12 +katie hernandez 9 +katie ichabod 21 +katie johnson 6 +katie king 15 +katie laertes 16 +katie miller 19 +katie nixon 16 +katie ovid 16 +katie polk 17 +katie quirinius 14 +katie robinson 19 +katie steinbeck 18 +katie thompson 16 +katie underhill 9 +katie van buren 15 +katie white 17 +katie xylophone 17 +katie young 14 +katie zipper 17 +luke allen 10 +luke brown 15 +luke carson 12 +luke davidson 15 +luke ellison 15 +luke falkner 18 +luke garcia 14 +luke hernandez 15 +luke ichabod 15 +luke johnson 18 +luke king 10 +luke laertes 22 +luke miller 9 +luke nixon 12 +luke ovid 20 +luke polk 17 +luke quirinius 10 +luke robinson 22 +luke steinbeck 18 +luke thompson 12 +luke underhill 15 +luke van buren 16 +luke white 11 +luke xylophone 16 +luke young 14 +luke zipper 15 +mike allen 16 +mike brown 27 +mike carson 22 +mike davidson 12 +mike ellison 21 +mike falkner 11 +mike garcia 20 +mike hernandez 18 +mike ichabod 15 +mike johnson 16 +mike king 14 +mike laertes 15 +mike miller 11 +mike nixon 15 +mike ovid 12 +mike polk 14 +mike quirinius 8 +mike robinson 10 +mike steinbeck 23 +mike thompson 11 +mike underhill 21 +mike van buren 13 +mike white 17 +mike xylophone 12 +mike young 14 +mike zipper 13 +nick allen 10 +nick brown 19 +nick carson 10 +nick davidson 18 +nick ellison 16 +nick falkner 17 +nick garcia 16 +nick hernandez 21 +nick ichabod 12 +nick johnson 10 +nick king 15 +nick laertes 7 +nick miller 13 +nick nixon 15 +nick ovid 16 +nick polk 14 +nick quirinius 17 +nick robinson 20 +nick steinbeck 16 +nick thompson 11 +nick underhill 17 +nick van buren 19 +nick white 14 +nick xylophone 16 +nick young 15 +nick zipper 21 +oscar allen 17 +oscar brown 9 +oscar carson 24 +oscar davidson 18 +oscar ellison 19 +oscar falkner 15 +oscar garcia 20 +oscar hernandez 9 +oscar ichabod 13 +oscar johnson 13 +oscar king 16 +oscar laertes 17 +oscar miller 13 +oscar nixon 23 +oscar ovid 14 +oscar polk 10 +oscar quirinius 17 +oscar robinson 15 +oscar steinbeck 15 +oscar thompson 19 +oscar underhill 15 +oscar van buren 15 +oscar white 19 +oscar xylophone 16 +oscar young 13 +oscar zipper 20 +priscilla allen 19 +priscilla brown 21 +priscilla carson 14 +priscilla davidson 12 +priscilla ellison 8 +priscilla falkner 15 +priscilla garcia 14 +priscilla hernandez 14 +priscilla ichabod 21 +priscilla johnson 17 +priscilla king 18 +priscilla laertes 15 +priscilla miller 11 +priscilla nixon 19 +priscilla ovid 9 +priscilla polk 14 +priscilla quirinius 11 +priscilla robinson 14 +priscilla steinbeck 12 +priscilla thompson 12 +priscilla underhill 18 +priscilla van buren 17 +priscilla white 9 +priscilla xylophone 9 +priscilla young 13 +priscilla zipper 18 +quinn allen 17 +quinn brown 16 +quinn carson 
15 +quinn davidson 16 +quinn ellison 16 +quinn falkner 13 +quinn garcia 17 +quinn hernandez 12 +quinn ichabod 9 +quinn johnson 11 +quinn king 13 +quinn laertes 11 +quinn miller 15 +quinn nixon 17 +quinn ovid 20 +quinn polk 10 +quinn quirinius 17 +quinn robinson 12 +quinn steinbeck 19 +quinn thompson 13 +quinn underhill 19 +quinn van buren 15 +quinn white 14 +quinn xylophone 13 +quinn young 10 +quinn zipper 13 +rachel allen 12 +rachel brown 17 +rachel carson 16 +rachel davidson 19 +rachel ellison 12 +rachel falkner 14 +rachel garcia 13 +rachel hernandez 12 +rachel ichabod 17 +rachel johnson 9 +rachel king 13 +rachel laertes 16 +rachel miller 13 +rachel nixon 16 +rachel ovid 16 +rachel polk 20 +rachel quirinius 13 +rachel robinson 18 +rachel steinbeck 9 +rachel thompson 15 +rachel underhill 12 +rachel van buren 9 +rachel white 9 +rachel xylophone 17 +rachel young 17 +rachel zipper 14 +sarah allen 15 +sarah brown 20 +sarah carson 8 +sarah davidson 10 +sarah ellison 8 +sarah falkner 18 +sarah garcia 12 +sarah hernandez 18 +sarah ichabod 13 +sarah johnson 19 +sarah king 14 +sarah laertes 13 +sarah miller 21 +sarah nixon 9 +sarah ovid 12 +sarah polk 19 +sarah quirinius 12 +sarah robinson 20 +sarah steinbeck 22 +sarah thompson 17 +sarah underhill 14 +sarah van buren 12 +sarah white 14 +sarah xylophone 18 +sarah young 17 +sarah zipper 16 +tom allen 19 +tom brown 15 +tom carson 12 +tom davidson 10 +tom ellison 17 +tom falkner 18 +tom garcia 13 +tom hernandez 23 +tom ichabod 22 +tom johnson 17 +tom king 7 +tom laertes 17 +tom miller 14 +tom nixon 9 +tom ovid 9 +tom polk 10 +tom quirinius 17 +tom robinson 16 +tom steinbeck 13 +tom thompson 11 +tom underhill 14 +tom van buren 11 +tom white 14 +tom xylophone 15 +tom young 20 +tom zipper 13 +ulysses allen 9 +ulysses brown 12 +ulysses carson 19 +ulysses davidson 16 +ulysses ellison 13 +ulysses falkner 15 +ulysses garcia 19 +ulysses hernandez 19 +ulysses ichabod 19 +ulysses johnson 12 +ulysses king 11 +ulysses laertes 9 +ulysses miller 15 +ulysses nixon 12 +ulysses ovid 12 +ulysses polk 16 +ulysses quirinius 14 +ulysses robinson 23 +ulysses steinbeck 11 +ulysses thompson 12 +ulysses underhill 32 +ulysses van buren 8 +ulysses white 19 +ulysses xylophone 20 +ulysses young 12 +ulysses zipper 16 +victor allen 9 +victor brown 15 +victor carson 12 +victor davidson 22 +victor ellison 11 +victor falkner 16 +victor garcia 16 +victor hernandez 21 +victor ichabod 22 +victor johnson 19 +victor king 18 +victor laertes 19 +victor miller 15 +victor nixon 12 +victor ovid 9 +victor polk 15 +victor quirinius 12 +victor robinson 20 +victor steinbeck 20 +victor thompson 13 +victor underhill 16 +victor van buren 13 +victor white 15 +victor xylophone 22 +victor young 18 +victor zipper 12 +wendy allen 11 +wendy brown 17 +wendy carson 11 +wendy davidson 9 +wendy ellison 13 +wendy falkner 11 +wendy garcia 22 +wendy hernandez 20 +wendy ichabod 17 +wendy johnson 9 +wendy king 19 +wendy laertes 11 +wendy miller 14 +wendy nixon 18 +wendy ovid 12 +wendy polk 11 +wendy quirinius 10 +wendy robinson 13 +wendy steinbeck 17 +wendy thompson 16 +wendy underhill 16 +wendy van buren 17 +wendy white 4 +wendy xylophone 10 +wendy young 17 +wendy zipper 15 +xavier allen 18 +xavier brown 23 +xavier carson 17 +xavier davidson 17 +xavier ellison 10 +xavier falkner 13 +xavier garcia 12 +xavier hernandez 12 +xavier ichabod 16 +xavier johnson 17 +xavier king 14 +xavier laertes 14 +xavier miller 13 +xavier nixon 10 +xavier ovid 13 +xavier polk 15 +xavier quirinius 16 +xavier robinson 20 +xavier steinbeck 
12 +xavier thompson 12 +xavier underhill 15 +xavier van buren 15 +xavier white 13 +xavier xylophone 5 +xavier young 10 +xavier zipper 13 +yuri allen 15 +yuri brown 21 +yuri carson 15 +yuri davidson 15 +yuri ellison 17 +yuri falkner 16 +yuri garcia 10 +yuri hernandez 17 +yuri ichabod 19 +yuri johnson 16 +yuri king 15 +yuri laertes 14 +yuri miller 11 +yuri nixon 16 +yuri ovid 9 +yuri polk 23 +yuri quirinius 15 +yuri robinson 11 +yuri steinbeck 16 +yuri thompson 22 +yuri underhill 10 +yuri van buren 10 +yuri white 17 +yuri xylophone 18 +yuri young 7 +yuri zipper 10 +zach allen 21 +zach brown 17 +zach carson 19 +zach davidson 16 +zach ellison 10 +zach falkner 22 +zach garcia 13 +zach hernandez 12 +zach ichabod 14 +zach johnson 13 +zach king 14 +zach laertes 16 +zach miller 14 +zach nixon 17 +zach ovid 17 +zach polk 15 +zach quirinius 12 +zach robinson 9 +zach steinbeck 14 +zach thompson 13 +zach underhill 20 +zach van buren 15 +zach white 20 +zach xylophone 22 +zach young 20 +zach zipper 18 +PREHOOK: query: explain vectorization expression +select s, count(ts) from over10k group by s +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization expression +select s, count(ts) from over10k group by s +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: over10k + Statistics: Num rows: 1 Data size: 10175440 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Select Operator + expressions: s (type: string), ts (type: timestamp) + outputColumnNames: s, ts + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [7, 8] + Statistics: Num rows: 1 Data size: 10175440 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: count(ts) + Group By Vectorization: + aggregators: VectorUDAFCount(col 8:timestamp) -> bigint + className: VectorGroupByOperator + groupByMode: HASH + keyExpressions: col 7:string + native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false + vectorProcessingMode: HASH + projectedOutputColumnNums: [0] + keys: s (type: string) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 10175440 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Reduce Sink Vectorization: + className: VectorReduceSinkOperator + native: false + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false + Statistics: Num rows: 1 Data size: 10175440 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: bigint) + Execution mode: vectorized + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + 
inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Vectorization: + enabled: false + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true + enableConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false + Reduce Operator Tree: + Group By Operator + aggregations: count(VALUE._col0) + keys: KEY._col0 (type: string) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 10175440 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 10175440 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select s, count(ts) from over10k group by s +PREHOOK: type: QUERY +PREHOOK: Input: default@over10k +#### A masked pattern was here #### +POSTHOOK: query: select s, count(ts) from over10k group by s +POSTHOOK: type: QUERY +POSTHOOK: Input: default@over10k +#### A masked pattern was here #### +alice allen 8 +alice brown 14 +alice carson 10 +alice davidson 18 +alice ellison 15 +alice falkner 17 +alice garcia 13 +alice hernandez 18 +alice ichabod 22 +alice johnson 12 +alice king 16 +alice laertes 16 +alice miller 16 +alice nixon 18 +alice ovid 17 +alice polk 14 +alice quirinius 15 +alice robinson 17 +alice steinbeck 16 +alice thompson 9 +alice underhill 14 +alice van buren 9 +alice white 10 +alice xylophone 22 +alice young 11 +alice zipper 12 +bob allen 10 +bob brown 13 +bob carson 23 +bob davidson 13 +bob ellison 14 +bob falkner 17 +bob garcia 15 +bob hernandez 13 +bob ichabod 17 +bob johnson 9 +bob king 18 +bob laertes 17 +bob miller 12 +bob nixon 13 +bob ovid 28 +bob polk 10 +bob quirinius 17 +bob robinson 16 +bob steinbeck 11 +bob thompson 12 +bob underhill 14 +bob van buren 14 +bob white 19 +bob xylophone 21 +bob young 17 +bob zipper 11 +calvin allen 11 +calvin brown 13 +calvin carson 17 +calvin davidson 14 +calvin ellison 14 +calvin falkner 17 +calvin garcia 16 +calvin hernandez 17 +calvin ichabod 13 +calvin johnson 21 +calvin king 17 +calvin laertes 13 +calvin miller 18 +calvin nixon 17 +calvin ovid 16 +calvin polk 15 +calvin quirinius 16 +calvin robinson 13 +calvin steinbeck 15 +calvin thompson 16 +calvin underhill 9 +calvin van buren 15 +calvin white 18 +calvin xylophone 18 +calvin young 16 +calvin zipper 18 +david allen 21 +david brown 15 +david carson 11 +david davidson 13 +david ellison 16 +david falkner 13 +david garcia 15 +david hernandez 8 +david ichabod 7 +david johnson 14 +david king 15 +david laertes 20 +david miller 8 +david nixon 14 +david ovid 16 +david polk 11 +david quirinius 14 +david robinson 15 +david steinbeck 13 +david thompson 12 +david underhill 18 +david van buren 15 +david white 11 +david xylophone 14 +david young 19 +david zipper 17 +ethan allen 15 +ethan brown 17 +ethan carson 22 +ethan davidson 14 +ethan ellison 19 +ethan falkner 14 +ethan garcia 19 +ethan hernandez 13 +ethan ichabod 14 +ethan johnson 11 +ethan king 20 +ethan laertes 20 +ethan miller 9 +ethan nixon 23 +ethan ovid 16 +ethan polk 16 +ethan quirinius 16 +ethan robinson 18 +ethan steinbeck 7 +ethan thompson 24 +ethan underhill 17 +ethan van buren 13 +ethan white 12 +ethan xylophone 17 +ethan young 15 +ethan 
zipper 14 +fred allen 12 +fred brown 15 +fred carson 9 +fred davidson 13 +fred ellison 19 +fred falkner 12 +fred garcia 5 +fred hernandez 14 +fred ichabod 13 +fred johnson 15 +fred king 14 +fred laertes 12 +fred miller 15 +fred nixon 19 +fred ovid 13 +fred polk 21 +fred quirinius 18 +fred robinson 17 +fred steinbeck 11 +fred thompson 11 +fred underhill 13 +fred van buren 17 +fred white 15 +fred xylophone 11 +fred young 14 +fred zipper 13 +gabriella allen 7 +gabriella brown 19 +gabriella carson 8 +gabriella davidson 12 +gabriella ellison 20 +gabriella falkner 16 +gabriella garcia 15 +gabriella hernandez 19 +gabriella ichabod 19 +gabriella johnson 8 +gabriella king 12 +gabriella laertes 8 +gabriella miller 6 +gabriella nixon 22 +gabriella ovid 6 +gabriella polk 13 +gabriella quirinius 17 +gabriella robinson 16 +gabriella steinbeck 18 +gabriella thompson 13 +gabriella underhill 22 +gabriella van buren 18 +gabriella white 16 +gabriella xylophone 12 +gabriella young 8 +gabriella zipper 13 +holly allen 12 +holly brown 9 +holly carson 12 +holly davidson 9 +holly ellison 10 +holly falkner 24 +holly garcia 15 +holly hernandez 18 +holly ichabod 12 +holly johnson 14 +holly king 12 +holly laertes 9 +holly miller 14 +holly nixon 12 +holly ovid 12 +holly polk 16 +holly quirinius 16 +holly robinson 13 +holly steinbeck 11 +holly thompson 15 +holly underhill 27 +holly van buren 14 +holly white 22 +holly xylophone 18 +holly young 9 +holly zipper 11 +irene allen 9 +irene brown 10 +irene carson 18 +irene davidson 10 +irene ellison 16 +irene falkner 16 +irene garcia 15 +irene hernandez 12 +irene ichabod 14 +irene johnson 18 +irene king 18 +irene laertes 19 +irene miller 16 +irene nixon 17 +irene ovid 14 +irene polk 21 +irene quirinius 23 +irene robinson 13 +irene steinbeck 7 +irene thompson 16 +irene underhill 10 +irene van buren 19 +irene white 10 +irene xylophone 11 +irene young 12 +irene zipper 7 +jessica allen 12 +jessica brown 16 +jessica carson 12 +jessica davidson 24 +jessica ellison 14 +jessica falkner 10 +jessica garcia 16 +jessica hernandez 14 +jessica ichabod 15 +jessica johnson 16 +jessica king 15 +jessica laertes 10 +jessica miller 18 +jessica nixon 18 +jessica ovid 12 +jessica polk 12 +jessica quirinius 16 +jessica robinson 17 +jessica steinbeck 13 +jessica thompson 19 +jessica underhill 13 +jessica van buren 9 +jessica white 24 +jessica xylophone 16 +jessica young 13 +jessica zipper 12 +katie allen 15 +katie brown 16 +katie carson 11 +katie davidson 18 +katie ellison 10 +katie falkner 15 +katie garcia 12 +katie hernandez 9 +katie ichabod 21 +katie johnson 6 +katie king 15 +katie laertes 16 +katie miller 19 +katie nixon 16 +katie ovid 16 +katie polk 17 +katie quirinius 14 +katie robinson 19 +katie steinbeck 18 +katie thompson 16 +katie underhill 9 +katie van buren 15 +katie white 17 +katie xylophone 17 +katie young 14 +katie zipper 17 +luke allen 10 +luke brown 15 +luke carson 12 +luke davidson 15 +luke ellison 15 +luke falkner 18 +luke garcia 14 +luke hernandez 15 +luke ichabod 15 +luke johnson 18 +luke king 10 +luke laertes 22 +luke miller 9 +luke nixon 12 +luke ovid 20 +luke polk 17 +luke quirinius 10 +luke robinson 22 +luke steinbeck 18 +luke thompson 12 +luke underhill 15 +luke van buren 16 +luke white 11 +luke xylophone 16 +luke young 14 +luke zipper 15 +mike allen 16 +mike brown 27 +mike carson 22 +mike davidson 12 +mike ellison 21 +mike falkner 11 +mike garcia 20 +mike hernandez 18 +mike ichabod 15 +mike johnson 16 +mike king 14 +mike laertes 15 +mike miller 11 +mike nixon 15 +mike ovid 
12 +mike polk 14 +mike quirinius 8 +mike robinson 10 +mike steinbeck 23 +mike thompson 11 +mike underhill 21 +mike van buren 13 +mike white 17 +mike xylophone 12 +mike young 14 +mike zipper 13 +nick allen 10 +nick brown 19 +nick carson 10 +nick davidson 18 +nick ellison 16 +nick falkner 17 +nick garcia 16 +nick hernandez 21 +nick ichabod 12 +nick johnson 10 +nick king 15 +nick laertes 7 +nick miller 13 +nick nixon 15 +nick ovid 16 +nick polk 14 +nick quirinius 17 +nick robinson 20 +nick steinbeck 16 +nick thompson 11 +nick underhill 17 +nick van buren 19 +nick white 14 +nick xylophone 16 +nick young 15 +nick zipper 21 +oscar allen 17 +oscar brown 9 +oscar carson 24 +oscar davidson 18 +oscar ellison 19 +oscar falkner 15 +oscar garcia 20 +oscar hernandez 9 +oscar ichabod 13 +oscar johnson 13 +oscar king 16 +oscar laertes 17 +oscar miller 13 +oscar nixon 23 +oscar ovid 14 +oscar polk 10 +oscar quirinius 17 +oscar robinson 15 +oscar steinbeck 15 +oscar thompson 19 +oscar underhill 15 +oscar van buren 15 +oscar white 19 +oscar xylophone 16 +oscar young 13 +oscar zipper 20 +priscilla allen 19 +priscilla brown 21 +priscilla carson 14 +priscilla davidson 12 +priscilla ellison 8 +priscilla falkner 15 +priscilla garcia 14 +priscilla hernandez 14 +priscilla ichabod 21 +priscilla johnson 17 +priscilla king 18 +priscilla laertes 15 +priscilla miller 11 +priscilla nixon 19 +priscilla ovid 9 +priscilla polk 14 +priscilla quirinius 11 +priscilla robinson 14 +priscilla steinbeck 12 +priscilla thompson 12 +priscilla underhill 18 +priscilla van buren 17 +priscilla white 9 +priscilla xylophone 9 +priscilla young 13 +priscilla zipper 18 +quinn allen 17 +quinn brown 16 +quinn carson 15 +quinn davidson 16 +quinn ellison 16 +quinn falkner 13 +quinn garcia 17 +quinn hernandez 12 +quinn ichabod 9 +quinn johnson 11 +quinn king 13 +quinn laertes 11 +quinn miller 15 +quinn nixon 17 +quinn ovid 20 +quinn polk 10 +quinn quirinius 17 +quinn robinson 12 +quinn steinbeck 19 +quinn thompson 13 +quinn underhill 19 +quinn van buren 15 +quinn white 14 +quinn xylophone 13 +quinn young 10 +quinn zipper 13 +rachel allen 12 +rachel brown 17 +rachel carson 16 +rachel davidson 19 +rachel ellison 12 +rachel falkner 14 +rachel garcia 13 +rachel hernandez 12 +rachel ichabod 17 +rachel johnson 9 +rachel king 13 +rachel laertes 16 +rachel miller 13 +rachel nixon 16 +rachel ovid 16 +rachel polk 20 +rachel quirinius 13 +rachel robinson 18 +rachel steinbeck 9 +rachel thompson 15 +rachel underhill 12 +rachel van buren 9 +rachel white 9 +rachel xylophone 17 +rachel young 17 +rachel zipper 14 +sarah allen 15 +sarah brown 20 +sarah carson 8 +sarah davidson 10 +sarah ellison 8 +sarah falkner 18 +sarah garcia 12 +sarah hernandez 18 +sarah ichabod 13 +sarah johnson 19 +sarah king 14 +sarah laertes 13 +sarah miller 21 +sarah nixon 9 +sarah ovid 12 +sarah polk 19 +sarah quirinius 12 +sarah robinson 20 +sarah steinbeck 22 +sarah thompson 17 +sarah underhill 14 +sarah van buren 12 +sarah white 14 +sarah xylophone 18 +sarah young 17 +sarah zipper 16 +tom allen 19 +tom brown 15 +tom carson 12 +tom davidson 10 +tom ellison 17 +tom falkner 18 +tom garcia 13 +tom hernandez 23 +tom ichabod 22 +tom johnson 17 +tom king 7 +tom laertes 17 +tom miller 14 +tom nixon 9 +tom ovid 9 +tom polk 10 +tom quirinius 17 +tom robinson 16 +tom steinbeck 13 +tom thompson 11 +tom underhill 14 +tom van buren 11 +tom white 14 +tom xylophone 15 +tom young 20 +tom zipper 13 +ulysses allen 9 +ulysses brown 12 +ulysses carson 19 +ulysses davidson 16 +ulysses ellison 13 +ulysses 
falkner 15 +ulysses garcia 19 +ulysses hernandez 19 +ulysses ichabod 19 +ulysses johnson 12 +ulysses king 11 +ulysses laertes 9 +ulysses miller 15 +ulysses nixon 12 +ulysses ovid 12 +ulysses polk 16 +ulysses quirinius 14 +ulysses robinson 23 +ulysses steinbeck 11 +ulysses thompson 12 +ulysses underhill 32 +ulysses van buren 8 +ulysses white 19 +ulysses xylophone 20 +ulysses young 12 +ulysses zipper 16 +victor allen 9 +victor brown 15 +victor carson 12 +victor davidson 22 +victor ellison 11 +victor falkner 16 +victor garcia 16 +victor hernandez 21 +victor ichabod 22 +victor johnson 19 +victor king 18 +victor laertes 19 +victor miller 15 +victor nixon 12 +victor ovid 9 +victor polk 15 +victor quirinius 12 +victor robinson 20 +victor steinbeck 20 +victor thompson 13 +victor underhill 16 +victor van buren 13 +victor white 15 +victor xylophone 22 +victor young 18 +victor zipper 12 +wendy allen 11 +wendy brown 17 +wendy carson 11 +wendy davidson 9 +wendy ellison 13 +wendy falkner 11 +wendy garcia 22 +wendy hernandez 20 +wendy ichabod 17 +wendy johnson 9 +wendy king 19 +wendy laertes 11 +wendy miller 14 +wendy nixon 18 +wendy ovid 12 +wendy polk 11 +wendy quirinius 10 +wendy robinson 13 +wendy steinbeck 17 +wendy thompson 16 +wendy underhill 16 +wendy van buren 17 +wendy white 4 +wendy xylophone 10 +wendy young 17 +wendy zipper 15 +xavier allen 18 +xavier brown 23 +xavier carson 17 +xavier davidson 17 +xavier ellison 10 +xavier falkner 13 +xavier garcia 12 +xavier hernandez 12 +xavier ichabod 16 +xavier johnson 17 +xavier king 14 +xavier laertes 14 +xavier miller 13 +xavier nixon 10 +xavier ovid 13 +xavier polk 15 +xavier quirinius 16 +xavier robinson 20 +xavier steinbeck 12 +xavier thompson 12 +xavier underhill 15 +xavier van buren 15 +xavier white 13 +xavier xylophone 5 +xavier young 10 +xavier zipper 13 +yuri allen 15 +yuri brown 21 +yuri carson 15 +yuri davidson 15 +yuri ellison 17 +yuri falkner 16 +yuri garcia 10 +yuri hernandez 17 +yuri ichabod 19 +yuri johnson 16 +yuri king 15 +yuri laertes 14 +yuri miller 11 +yuri nixon 16 +yuri ovid 9 +yuri polk 23 +yuri quirinius 15 +yuri robinson 11 +yuri steinbeck 16 +yuri thompson 22 +yuri underhill 10 +yuri van buren 10 +yuri white 17 +yuri xylophone 18 +yuri young 7 +yuri zipper 10 +zach allen 21 +zach brown 17 +zach carson 19 +zach davidson 16 +zach ellison 10 +zach falkner 22 +zach garcia 13 +zach hernandez 12 +zach ichabod 14 +zach johnson 13 +zach king 14 +zach laertes 16 +zach miller 14 +zach nixon 17 +zach ovid 17 +zach polk 15 +zach quirinius 12 +zach robinson 9 +zach steinbeck 14 +zach thompson 13 +zach underhill 20 +zach van buren 15 +zach white 20 +zach xylophone 22 +zach young 20 +zach zipper 18 +PREHOOK: query: explain vectorization expression +select s, count(*) from over10k group by s +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization expression +select s, count(*) from over10k group by s +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: over10k + Statistics: Num rows: 1 Data size: 10175440 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Select Operator + expressions: s (type: string) + outputColumnNames: s + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [7] + Statistics: Num rows: 1 
Data size: 10175440 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: count() + Group By Vectorization: + aggregators: VectorUDAFCountStar(*) -> bigint + className: VectorGroupByOperator + groupByMode: HASH + keyExpressions: col 7:string + native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false + vectorProcessingMode: HASH + projectedOutputColumnNums: [0] + keys: s (type: string) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 10175440 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Reduce Sink Vectorization: + className: VectorReduceSinkOperator + native: false + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false + Statistics: Num rows: 1 Data size: 10175440 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: bigint) + Execution mode: vectorized + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Vectorization: + enabled: false + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true + enableConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false + Reduce Operator Tree: + Group By Operator + aggregations: count(VALUE._col0) + keys: KEY._col0 (type: string) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 10175440 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 10175440 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select s, count(*) from over10k group by s +PREHOOK: type: QUERY +PREHOOK: Input: default@over10k +#### A masked pattern was here #### +POSTHOOK: query: select s, count(*) from over10k group by s +POSTHOOK: type: QUERY +POSTHOOK: Input: default@over10k +#### A masked pattern was here #### +alice allen 8 +alice brown 14 +alice carson 10 +alice davidson 18 +alice ellison 15 +alice falkner 17 +alice garcia 13 +alice hernandez 18 +alice ichabod 22 +alice johnson 12 +alice king 16 +alice laertes 16 +alice miller 16 +alice nixon 18 +alice ovid 17 +alice polk 14 +alice quirinius 15 +alice robinson 17 +alice steinbeck 16 +alice thompson 9 +alice underhill 14 +alice van buren 9 +alice white 10 +alice xylophone 22 +alice young 11 +alice zipper 12 +bob allen 10 +bob brown 13 +bob carson 23 +bob davidson 13 +bob ellison 14 +bob falkner 17 +bob garcia 15 +bob hernandez 13 +bob ichabod 17 +bob johnson 9 
+bob king 18 +bob laertes 17 +bob miller 12 +bob nixon 13 +bob ovid 28 +bob polk 10 +bob quirinius 17 +bob robinson 16 +bob steinbeck 11 +bob thompson 12 +bob underhill 14 +bob van buren 14 +bob white 19 +bob xylophone 21 +bob young 17 +bob zipper 11 +calvin allen 11 +calvin brown 13 +calvin carson 17 +calvin davidson 14 +calvin ellison 14 +calvin falkner 17 +calvin garcia 16 +calvin hernandez 17 +calvin ichabod 13 +calvin johnson 21 +calvin king 17 +calvin laertes 13 +calvin miller 18 +calvin nixon 17 +calvin ovid 16 +calvin polk 15 +calvin quirinius 16 +calvin robinson 13 +calvin steinbeck 15 +calvin thompson 16 +calvin underhill 9 +calvin van buren 15 +calvin white 18 +calvin xylophone 18 +calvin young 16 +calvin zipper 18 +david allen 21 +david brown 15 +david carson 11 +david davidson 13 +david ellison 16 +david falkner 13 +david garcia 15 +david hernandez 8 +david ichabod 7 +david johnson 14 +david king 15 +david laertes 20 +david miller 8 +david nixon 14 +david ovid 16 +david polk 11 +david quirinius 14 +david robinson 15 +david steinbeck 13 +david thompson 12 +david underhill 18 +david van buren 15 +david white 11 +david xylophone 14 +david young 19 +david zipper 17 +ethan allen 15 +ethan brown 17 +ethan carson 22 +ethan davidson 14 +ethan ellison 19 +ethan falkner 14 +ethan garcia 19 +ethan hernandez 13 +ethan ichabod 14 +ethan johnson 11 +ethan king 20 +ethan laertes 20 +ethan miller 9 +ethan nixon 23 +ethan ovid 16 +ethan polk 16 +ethan quirinius 16 +ethan robinson 18 +ethan steinbeck 7 +ethan thompson 24 +ethan underhill 17 +ethan van buren 13 +ethan white 12 +ethan xylophone 17 +ethan young 15 +ethan zipper 14 +fred allen 12 +fred brown 15 +fred carson 9 +fred davidson 13 +fred ellison 19 +fred falkner 12 +fred garcia 5 +fred hernandez 14 +fred ichabod 13 +fred johnson 15 +fred king 14 +fred laertes 12 +fred miller 15 +fred nixon 19 +fred ovid 13 +fred polk 21 +fred quirinius 18 +fred robinson 17 +fred steinbeck 11 +fred thompson 11 +fred underhill 13 +fred van buren 17 +fred white 15 +fred xylophone 11 +fred young 14 +fred zipper 13 +gabriella allen 7 +gabriella brown 19 +gabriella carson 8 +gabriella davidson 12 +gabriella ellison 20 +gabriella falkner 16 +gabriella garcia 15 +gabriella hernandez 19 +gabriella ichabod 19 +gabriella johnson 8 +gabriella king 12 +gabriella laertes 8 +gabriella miller 6 +gabriella nixon 22 +gabriella ovid 6 +gabriella polk 13 +gabriella quirinius 17 +gabriella robinson 16 +gabriella steinbeck 18 +gabriella thompson 13 +gabriella underhill 22 +gabriella van buren 18 +gabriella white 16 +gabriella xylophone 12 +gabriella young 8 +gabriella zipper 13 +holly allen 12 +holly brown 9 +holly carson 12 +holly davidson 9 +holly ellison 10 +holly falkner 24 +holly garcia 15 +holly hernandez 18 +holly ichabod 12 +holly johnson 14 +holly king 12 +holly laertes 9 +holly miller 14 +holly nixon 12 +holly ovid 12 +holly polk 16 +holly quirinius 16 +holly robinson 13 +holly steinbeck 11 +holly thompson 15 +holly underhill 27 +holly van buren 14 +holly white 22 +holly xylophone 18 +holly young 9 +holly zipper 11 +irene allen 9 +irene brown 10 +irene carson 18 +irene davidson 10 +irene ellison 16 +irene falkner 16 +irene garcia 15 +irene hernandez 12 +irene ichabod 14 +irene johnson 18 +irene king 18 +irene laertes 19 +irene miller 16 +irene nixon 17 +irene ovid 14 +irene polk 21 +irene quirinius 23 +irene robinson 13 +irene steinbeck 7 +irene thompson 16 +irene underhill 10 +irene van buren 19 +irene white 10 +irene xylophone 11 +irene young 12 +irene zipper 7 
+jessica allen 12 +jessica brown 16 +jessica carson 12 +jessica davidson 24 +jessica ellison 14 +jessica falkner 10 +jessica garcia 16 +jessica hernandez 14 +jessica ichabod 15 +jessica johnson 16 +jessica king 15 +jessica laertes 10 +jessica miller 18 +jessica nixon 18 +jessica ovid 12 +jessica polk 12 +jessica quirinius 16 +jessica robinson 17 +jessica steinbeck 13 +jessica thompson 19 +jessica underhill 13 +jessica van buren 9 +jessica white 24 +jessica xylophone 16 +jessica young 13 +jessica zipper 12 +katie allen 15 +katie brown 16 +katie carson 11 +katie davidson 18 +katie ellison 10 +katie falkner 15 +katie garcia 12 +katie hernandez 9 +katie ichabod 21 +katie johnson 6 +katie king 15 +katie laertes 16 +katie miller 19 +katie nixon 16 +katie ovid 16 +katie polk 17 +katie quirinius 14 +katie robinson 19 +katie steinbeck 18 +katie thompson 16 +katie underhill 9 +katie van buren 15 +katie white 17 +katie xylophone 17 +katie young 14 +katie zipper 17 +luke allen 10 +luke brown 15 +luke carson 12 +luke davidson 15 +luke ellison 15 +luke falkner 18 +luke garcia 14 +luke hernandez 15 +luke ichabod 15 +luke johnson 18 +luke king 10 +luke laertes 22 +luke miller 9 +luke nixon 12 +luke ovid 20 +luke polk 17 +luke quirinius 10 +luke robinson 22 +luke steinbeck 18 +luke thompson 12 +luke underhill 15 +luke van buren 16 +luke white 11 +luke xylophone 16 +luke young 14 +luke zipper 15 +mike allen 16 +mike brown 27 +mike carson 22 +mike davidson 12 +mike ellison 21 +mike falkner 11 +mike garcia 20 +mike hernandez 18 +mike ichabod 15 +mike johnson 16 +mike king 14 +mike laertes 15 +mike miller 11 +mike nixon 15 +mike ovid 12 +mike polk 14 +mike quirinius 8 +mike robinson 10 +mike steinbeck 23 +mike thompson 11 +mike underhill 21 +mike van buren 13 +mike white 17 +mike xylophone 12 +mike young 14 +mike zipper 13 +nick allen 10 +nick brown 19 +nick carson 10 +nick davidson 18 +nick ellison 16 +nick falkner 17 +nick garcia 16 +nick hernandez 21 +nick ichabod 12 +nick johnson 10 +nick king 15 +nick laertes 7 +nick miller 13 +nick nixon 15 +nick ovid 16 +nick polk 14 +nick quirinius 17 +nick robinson 20 +nick steinbeck 16 +nick thompson 11 +nick underhill 17 +nick van buren 19 +nick white 14 +nick xylophone 16 +nick young 15 +nick zipper 21 +oscar allen 17 +oscar brown 9 +oscar carson 24 +oscar davidson 18 +oscar ellison 19 +oscar falkner 15 +oscar garcia 20 +oscar hernandez 9 +oscar ichabod 13 +oscar johnson 13 +oscar king 16 +oscar laertes 17 +oscar miller 13 +oscar nixon 23 +oscar ovid 14 +oscar polk 10 +oscar quirinius 17 +oscar robinson 15 +oscar steinbeck 15 +oscar thompson 19 +oscar underhill 15 +oscar van buren 15 +oscar white 19 +oscar xylophone 16 +oscar young 13 +oscar zipper 20 +priscilla allen 19 +priscilla brown 21 +priscilla carson 14 +priscilla davidson 12 +priscilla ellison 8 +priscilla falkner 15 +priscilla garcia 14 +priscilla hernandez 14 +priscilla ichabod 21 +priscilla johnson 17 +priscilla king 18 +priscilla laertes 15 +priscilla miller 11 +priscilla nixon 19 +priscilla ovid 9 +priscilla polk 14 +priscilla quirinius 11 +priscilla robinson 14 +priscilla steinbeck 12 +priscilla thompson 12 +priscilla underhill 18 +priscilla van buren 17 +priscilla white 9 +priscilla xylophone 9 +priscilla young 13 +priscilla zipper 18 +quinn allen 17 +quinn brown 16 +quinn carson 15 +quinn davidson 16 +quinn ellison 16 +quinn falkner 13 +quinn garcia 17 +quinn hernandez 12 +quinn ichabod 9 +quinn johnson 11 +quinn king 13 +quinn laertes 11 +quinn miller 15 +quinn nixon 17 +quinn ovid 20 +quinn polk 
10 +quinn quirinius 17 +quinn robinson 12 +quinn steinbeck 19 +quinn thompson 13 +quinn underhill 19 +quinn van buren 15 +quinn white 14 +quinn xylophone 13 +quinn young 10 +quinn zipper 13 +rachel allen 12 +rachel brown 17 +rachel carson 16 +rachel davidson 19 +rachel ellison 12 +rachel falkner 14 +rachel garcia 13 +rachel hernandez 12 +rachel ichabod 17 +rachel johnson 9 +rachel king 13 +rachel laertes 16 +rachel miller 13 +rachel nixon 16 +rachel ovid 16 +rachel polk 20 +rachel quirinius 13 +rachel robinson 18 +rachel steinbeck 9 +rachel thompson 15 +rachel underhill 12 +rachel van buren 9 +rachel white 9 +rachel xylophone 17 +rachel young 17 +rachel zipper 14 +sarah allen 15 +sarah brown 20 +sarah carson 8 +sarah davidson 10 +sarah ellison 8 +sarah falkner 18 +sarah garcia 12 +sarah hernandez 18 +sarah ichabod 13 +sarah johnson 19 +sarah king 14 +sarah laertes 13 +sarah miller 21 +sarah nixon 9 +sarah ovid 12 +sarah polk 19 +sarah quirinius 12 +sarah robinson 20 +sarah steinbeck 22 +sarah thompson 17 +sarah underhill 14 +sarah van buren 12 +sarah white 14 +sarah xylophone 18 +sarah young 17 +sarah zipper 16 +tom allen 19 +tom brown 15 +tom carson 12 +tom davidson 10 +tom ellison 17 +tom falkner 18 +tom garcia 13 +tom hernandez 23 +tom ichabod 22 +tom johnson 17 +tom king 7 +tom laertes 17 +tom miller 14 +tom nixon 9 +tom ovid 9 +tom polk 10 +tom quirinius 17 +tom robinson 16 +tom steinbeck 13 +tom thompson 11 +tom underhill 14 +tom van buren 11 +tom white 14 +tom xylophone 15 +tom young 20 +tom zipper 13 +ulysses allen 9 +ulysses brown 12 +ulysses carson 19 +ulysses davidson 16 +ulysses ellison 13 +ulysses falkner 15 +ulysses garcia 19 +ulysses hernandez 19 +ulysses ichabod 19 +ulysses johnson 12 +ulysses king 11 +ulysses laertes 9 +ulysses miller 15 +ulysses nixon 12 +ulysses ovid 12 +ulysses polk 16 +ulysses quirinius 14 +ulysses robinson 23 +ulysses steinbeck 11 +ulysses thompson 12 +ulysses underhill 32 +ulysses van buren 8 +ulysses white 19 +ulysses xylophone 20 +ulysses young 12 +ulysses zipper 16 +victor allen 9 +victor brown 15 +victor carson 12 +victor davidson 22 +victor ellison 11 +victor falkner 16 +victor garcia 16 +victor hernandez 21 +victor ichabod 22 +victor johnson 19 +victor king 18 +victor laertes 19 +victor miller 15 +victor nixon 12 +victor ovid 9 +victor polk 15 +victor quirinius 12 +victor robinson 20 +victor steinbeck 20 +victor thompson 13 +victor underhill 16 +victor van buren 13 +victor white 15 +victor xylophone 22 +victor young 18 +victor zipper 12 +wendy allen 11 +wendy brown 17 +wendy carson 11 +wendy davidson 9 +wendy ellison 13 +wendy falkner 11 +wendy garcia 22 +wendy hernandez 20 +wendy ichabod 17 +wendy johnson 9 +wendy king 19 +wendy laertes 11 +wendy miller 14 +wendy nixon 18 +wendy ovid 12 +wendy polk 11 +wendy quirinius 10 +wendy robinson 13 +wendy steinbeck 17 +wendy thompson 16 +wendy underhill 16 +wendy van buren 17 +wendy white 4 +wendy xylophone 10 +wendy young 17 +wendy zipper 15 +xavier allen 18 +xavier brown 23 +xavier carson 17 +xavier davidson 17 +xavier ellison 10 +xavier falkner 13 +xavier garcia 12 +xavier hernandez 12 +xavier ichabod 16 +xavier johnson 17 +xavier king 14 +xavier laertes 14 +xavier miller 13 +xavier nixon 10 +xavier ovid 13 +xavier polk 15 +xavier quirinius 16 +xavier robinson 20 +xavier steinbeck 12 +xavier thompson 12 +xavier underhill 15 +xavier van buren 15 +xavier white 13 +xavier xylophone 5 +xavier young 10 +xavier zipper 13 +yuri allen 15 +yuri brown 21 +yuri carson 15 +yuri davidson 15 +yuri ellison 17 
+yuri falkner 16 +yuri garcia 10 +yuri hernandez 17 +yuri ichabod 19 +yuri johnson 16 +yuri king 15 +yuri laertes 14 +yuri miller 11 +yuri nixon 16 +yuri ovid 9 +yuri polk 23 +yuri quirinius 15 +yuri robinson 11 +yuri steinbeck 16 +yuri thompson 22 +yuri underhill 10 +yuri van buren 10 +yuri white 17 +yuri xylophone 18 +yuri young 7 +yuri zipper 10 +zach allen 21 +zach brown 17 +zach carson 19 +zach davidson 16 +zach ellison 10 +zach falkner 22 +zach garcia 13 +zach hernandez 12 +zach ichabod 14 +zach johnson 13 +zach king 14 +zach laertes 16 +zach miller 14 +zach nixon 17 +zach ovid 17 +zach polk 15 +zach quirinius 12 +zach robinson 9 +zach steinbeck 14 +zach thompson 13 +zach underhill 20 +zach van buren 15 +zach white 20 +zach xylophone 22 +zach young 20 +zach zipper 18 +PREHOOK: query: explain vectorization expression +select ts, count(ts) from over10k group by ts +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization expression +select ts, count(ts) from over10k group by ts +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: over10k + Statistics: Num rows: 1 Data size: 10175440 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Select Operator + expressions: ts (type: timestamp) + outputColumnNames: ts + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [8] + Statistics: Num rows: 1 Data size: 10175440 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: count(ts) + Group By Vectorization: + aggregators: VectorUDAFCount(col 8:timestamp) -> bigint + className: VectorGroupByOperator + groupByMode: HASH + keyExpressions: col 8:timestamp + native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false + vectorProcessingMode: HASH + projectedOutputColumnNums: [0] + keys: ts (type: timestamp) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 10175440 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: timestamp) + sort order: + + Map-reduce partition columns: _col0 (type: timestamp) + Reduce Sink Vectorization: + className: VectorReduceSinkOperator + native: false + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false + Statistics: Num rows: 1 Data size: 10175440 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: bigint) + Execution mode: vectorized + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Vectorization: + enabled: false + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS 
true + enableConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false + Reduce Operator Tree: + Group By Operator + aggregations: count(VALUE._col0) + keys: KEY._col0 (type: timestamp) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 10175440 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 10175440 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select ts, count(ts) from over10k group by ts +PREHOOK: type: QUERY +PREHOOK: Input: default@over10k +#### A masked pattern was here #### +POSTHOOK: query: select ts, count(ts) from over10k group by ts +POSTHOOK: type: QUERY +POSTHOOK: Input: default@over10k +#### A masked pattern was here #### +2013-03-01 09:11:58.70307 26 +2013-03-01 09:11:58.703071 50 +2013-03-01 09:11:58.703072 32 +2013-03-01 09:11:58.703073 42 +2013-03-01 09:11:58.703074 45 +2013-03-01 09:11:58.703075 38 +2013-03-01 09:11:58.703076 45 +2013-03-01 09:11:58.703077 50 +2013-03-01 09:11:58.703078 24 +2013-03-01 09:11:58.703079 43 +2013-03-01 09:11:58.70308 25 +2013-03-01 09:11:58.703081 36 +2013-03-01 09:11:58.703082 38 +2013-03-01 09:11:58.703083 42 +2013-03-01 09:11:58.703084 44 +2013-03-01 09:11:58.703085 33 +2013-03-01 09:11:58.703086 40 +2013-03-01 09:11:58.703087 39 +2013-03-01 09:11:58.703088 30 +2013-03-01 09:11:58.703089 36 +2013-03-01 09:11:58.70309 35 +2013-03-01 09:11:58.703091 38 +2013-03-01 09:11:58.703092 38 +2013-03-01 09:11:58.703093 43 +2013-03-01 09:11:58.703094 44 +2013-03-01 09:11:58.703095 29 +2013-03-01 09:11:58.703096 45 +2013-03-01 09:11:58.703097 37 +2013-03-01 09:11:58.703098 36 +2013-03-01 09:11:58.703099 39 +2013-03-01 09:11:58.7031 34 +2013-03-01 09:11:58.703101 27 +2013-03-01 09:11:58.703102 45 +2013-03-01 09:11:58.703103 38 +2013-03-01 09:11:58.703104 45 +2013-03-01 09:11:58.703105 40 +2013-03-01 09:11:58.703106 43 +2013-03-01 09:11:58.703107 34 +2013-03-01 09:11:58.703108 38 +2013-03-01 09:11:58.703109 35 +2013-03-01 09:11:58.70311 44 +2013-03-01 09:11:58.703111 44 +2013-03-01 09:11:58.703112 45 +2013-03-01 09:11:58.703113 44 +2013-03-01 09:11:58.703114 24 +2013-03-01 09:11:58.703115 38 +2013-03-01 09:11:58.703116 26 +2013-03-01 09:11:58.703117 39 +2013-03-01 09:11:58.703118 27 +2013-03-01 09:11:58.703119 35 +2013-03-01 09:11:58.70312 43 +2013-03-01 09:11:58.703121 45 +2013-03-01 09:11:58.703122 34 +2013-03-01 09:11:58.703123 36 +2013-03-01 09:11:58.703124 50 +2013-03-01 09:11:58.703125 40 +2013-03-01 09:11:58.703126 36 +2013-03-01 09:11:58.703127 45 +2013-03-01 09:11:58.703128 36 +2013-03-01 09:11:58.703129 42 +2013-03-01 09:11:58.70313 41 +2013-03-01 09:11:58.703131 47 +2013-03-01 09:11:58.703132 42 +2013-03-01 09:11:58.703133 42 +2013-03-01 09:11:58.703134 33 +2013-03-01 09:11:58.703135 43 +2013-03-01 09:11:58.703136 38 +2013-03-01 09:11:58.703137 36 +2013-03-01 09:11:58.703138 43 +2013-03-01 09:11:58.703139 45 +2013-03-01 09:11:58.70314 43 +2013-03-01 09:11:58.703141 41 +2013-03-01 09:11:58.703142 38 +2013-03-01 09:11:58.703143 45 +2013-03-01 09:11:58.703144 41 +2013-03-01 09:11:58.703145 36 +2013-03-01 09:11:58.703146 37 +2013-03-01 09:11:58.703147 34 +2013-03-01 09:11:58.703148 31 +2013-03-01 09:11:58.703149 31 +2013-03-01 
09:11:58.70315 42 +2013-03-01 09:11:58.703151 42 +2013-03-01 09:11:58.703152 40 +2013-03-01 09:11:58.703153 26 +2013-03-01 09:11:58.703154 28 +2013-03-01 09:11:58.703155 31 +2013-03-01 09:11:58.703156 47 +2013-03-01 09:11:58.703157 46 +2013-03-01 09:11:58.703158 38 +2013-03-01 09:11:58.703159 33 +2013-03-01 09:11:58.70316 35 +2013-03-01 09:11:58.703161 36 +2013-03-01 09:11:58.703162 42 +2013-03-01 09:11:58.703163 37 +2013-03-01 09:11:58.703164 39 +2013-03-01 09:11:58.703165 33 +2013-03-01 09:11:58.703166 44 +2013-03-01 09:11:58.703167 44 +2013-03-01 09:11:58.703168 40 +2013-03-01 09:11:58.703169 38 +2013-03-01 09:11:58.70317 42 +2013-03-01 09:11:58.703171 43 +2013-03-01 09:11:58.703172 34 +2013-03-01 09:11:58.703173 44 +2013-03-01 09:11:58.703174 35 +2013-03-01 09:11:58.703175 41 +2013-03-01 09:11:58.703176 42 +2013-03-01 09:11:58.703177 43 +2013-03-01 09:11:58.703178 42 +2013-03-01 09:11:58.703179 39 +2013-03-01 09:11:58.70318 46 +2013-03-01 09:11:58.703181 30 +2013-03-01 09:11:58.703182 40 +2013-03-01 09:11:58.703183 38 +2013-03-01 09:11:58.703184 37 +2013-03-01 09:11:58.703185 33 +2013-03-01 09:11:58.703186 40 +2013-03-01 09:11:58.703187 25 +2013-03-01 09:11:58.703188 34 +2013-03-01 09:11:58.703189 39 +2013-03-01 09:11:58.70319 44 +2013-03-01 09:11:58.703191 37 +2013-03-01 09:11:58.703192 46 +2013-03-01 09:11:58.703193 32 +2013-03-01 09:11:58.703194 43 +2013-03-01 09:11:58.703195 35 +2013-03-01 09:11:58.703196 32 +2013-03-01 09:11:58.703197 33 +2013-03-01 09:11:58.703198 44 +2013-03-01 09:11:58.703199 30 +2013-03-01 09:11:58.7032 44 +2013-03-01 09:11:58.703201 32 +2013-03-01 09:11:58.703202 45 +2013-03-01 09:11:58.703203 34 +2013-03-01 09:11:58.703204 39 +2013-03-01 09:11:58.703205 39 +2013-03-01 09:11:58.703206 39 +2013-03-01 09:11:58.703207 40 +2013-03-01 09:11:58.703208 32 +2013-03-01 09:11:58.703209 38 +2013-03-01 09:11:58.70321 39 +2013-03-01 09:11:58.703211 42 +2013-03-01 09:11:58.703212 35 +2013-03-01 09:11:58.703213 41 +2013-03-01 09:11:58.703214 43 +2013-03-01 09:11:58.703215 33 +2013-03-01 09:11:58.703216 32 +2013-03-01 09:11:58.703217 39 +2013-03-01 09:11:58.703218 40 +2013-03-01 09:11:58.703219 46 +2013-03-01 09:11:58.70322 46 +2013-03-01 09:11:58.703221 38 +2013-03-01 09:11:58.703222 39 +2013-03-01 09:11:58.703223 47 +2013-03-01 09:11:58.703224 38 +2013-03-01 09:11:58.703225 32 +2013-03-01 09:11:58.703226 46 +2013-03-01 09:11:58.703227 49 +2013-03-01 09:11:58.703228 38 +2013-03-01 09:11:58.703229 32 +2013-03-01 09:11:58.70323 43 +2013-03-01 09:11:58.703231 45 +2013-03-01 09:11:58.703232 34 +2013-03-01 09:11:58.703233 51 +2013-03-01 09:11:58.703234 43 +2013-03-01 09:11:58.703235 40 +2013-03-01 09:11:58.703236 41 +2013-03-01 09:11:58.703237 37 +2013-03-01 09:11:58.703238 35 +2013-03-01 09:11:58.703239 46 +2013-03-01 09:11:58.70324 28 +2013-03-01 09:11:58.703241 46 +2013-03-01 09:11:58.703242 50 +2013-03-01 09:11:58.703243 46 +2013-03-01 09:11:58.703244 32 +2013-03-01 09:11:58.703245 42 +2013-03-01 09:11:58.703246 41 +2013-03-01 09:11:58.703247 33 +2013-03-01 09:11:58.703248 45 +2013-03-01 09:11:58.703249 37 +2013-03-01 09:11:58.70325 35 +2013-03-01 09:11:58.703251 35 +2013-03-01 09:11:58.703252 39 +2013-03-01 09:11:58.703253 24 +2013-03-01 09:11:58.703254 40 +2013-03-01 09:11:58.703255 32 +2013-03-01 09:11:58.703256 42 +2013-03-01 09:11:58.703257 49 +2013-03-01 09:11:58.703258 41 +2013-03-01 09:11:58.703259 33 +2013-03-01 09:11:58.70326 42 +2013-03-01 09:11:58.703261 33 +2013-03-01 09:11:58.703262 43 +2013-03-01 09:11:58.703263 42 +2013-03-01 09:11:58.703264 32 +2013-03-01 
09:11:58.703265 49 +2013-03-01 09:11:58.703266 39 +2013-03-01 09:11:58.703267 41 +2013-03-01 09:11:58.703268 44 +2013-03-01 09:11:58.703269 40 +2013-03-01 09:11:58.70327 37 +2013-03-01 09:11:58.703271 39 +2013-03-01 09:11:58.703272 47 +2013-03-01 09:11:58.703273 44 +2013-03-01 09:11:58.703274 36 +2013-03-01 09:11:58.703275 41 +2013-03-01 09:11:58.703276 42 +2013-03-01 09:11:58.703277 46 +2013-03-01 09:11:58.703278 49 +2013-03-01 09:11:58.703279 36 +2013-03-01 09:11:58.70328 40 +2013-03-01 09:11:58.703281 38 +2013-03-01 09:11:58.703282 40 +2013-03-01 09:11:58.703283 51 +2013-03-01 09:11:58.703284 29 +2013-03-01 09:11:58.703285 28 +2013-03-01 09:11:58.703286 44 +2013-03-01 09:11:58.703287 55 +2013-03-01 09:11:58.703288 41 +2013-03-01 09:11:58.703289 42 +2013-03-01 09:11:58.70329 41 +2013-03-01 09:11:58.703291 40 +2013-03-01 09:11:58.703292 41 +2013-03-01 09:11:58.703293 29 +2013-03-01 09:11:58.703294 38 +2013-03-01 09:11:58.703295 38 +2013-03-01 09:11:58.703296 41 +2013-03-01 09:11:58.703297 41 +2013-03-01 09:11:58.703298 34 +2013-03-01 09:11:58.703299 46 +2013-03-01 09:11:58.7033 40 +2013-03-01 09:11:58.703301 35 +2013-03-01 09:11:58.703302 35 +2013-03-01 09:11:58.703303 41 +2013-03-01 09:11:58.703304 32 +2013-03-01 09:11:58.703305 51 +2013-03-01 09:11:58.703306 38 +2013-03-01 09:11:58.703307 38 +2013-03-01 09:11:58.703308 40 +2013-03-01 09:11:58.703309 31 +2013-03-01 09:11:58.70331 48 +2013-03-01 09:11:58.703311 33 +2013-03-01 09:11:58.703312 46 +2013-03-01 09:11:58.703313 43 +2013-03-01 09:11:58.703314 37 +2013-03-01 09:11:58.703315 36 +2013-03-01 09:11:58.703316 42 +2013-03-01 09:11:58.703317 47 +2013-03-01 09:11:58.703318 39 +2013-03-01 09:11:58.703319 45 +2013-03-01 09:11:58.70332 45 +2013-03-01 09:11:58.703321 37 +2013-03-01 09:11:58.703322 35 +2013-03-01 09:11:58.703323 45 +2013-03-01 09:11:58.703324 35 +2013-03-01 09:11:58.703325 39 +PREHOOK: query: explain vectorization expression +select ts, count(d) from over10k group by ts +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization expression +select ts, count(d) from over10k group by ts +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: over10k + Statistics: Num rows: 1 Data size: 10175440 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Select Operator + expressions: d (type: double), ts (type: timestamp) + outputColumnNames: d, ts + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [5, 8] + Statistics: Num rows: 1 Data size: 10175440 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: count(d) + Group By Vectorization: + aggregators: VectorUDAFCount(col 5:double) -> bigint + className: VectorGroupByOperator + groupByMode: HASH + keyExpressions: col 8:timestamp + native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false + vectorProcessingMode: HASH + projectedOutputColumnNums: [0] + keys: ts (type: timestamp) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 10175440 Basic stats: COMPLETE 
Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: timestamp) + sort order: + + Map-reduce partition columns: _col0 (type: timestamp) + Reduce Sink Vectorization: + className: VectorReduceSinkOperator + native: false + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false + Statistics: Num rows: 1 Data size: 10175440 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: bigint) + Execution mode: vectorized + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Vectorization: + enabled: false + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true + enableConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false + Reduce Operator Tree: + Group By Operator + aggregations: count(VALUE._col0) + keys: KEY._col0 (type: timestamp) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 10175440 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 10175440 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select ts, count(d) from over10k group by ts +PREHOOK: type: QUERY +PREHOOK: Input: default@over10k +#### A masked pattern was here #### +POSTHOOK: query: select ts, count(d) from over10k group by ts +POSTHOOK: type: QUERY +POSTHOOK: Input: default@over10k +#### A masked pattern was here #### +2013-03-01 09:11:58.70307 26 +2013-03-01 09:11:58.703071 50 +2013-03-01 09:11:58.703072 32 +2013-03-01 09:11:58.703073 42 +2013-03-01 09:11:58.703074 45 +2013-03-01 09:11:58.703075 38 +2013-03-01 09:11:58.703076 45 +2013-03-01 09:11:58.703077 50 +2013-03-01 09:11:58.703078 24 +2013-03-01 09:11:58.703079 43 +2013-03-01 09:11:58.70308 25 +2013-03-01 09:11:58.703081 36 +2013-03-01 09:11:58.703082 38 +2013-03-01 09:11:58.703083 42 +2013-03-01 09:11:58.703084 44 +2013-03-01 09:11:58.703085 33 +2013-03-01 09:11:58.703086 40 +2013-03-01 09:11:58.703087 39 +2013-03-01 09:11:58.703088 30 +2013-03-01 09:11:58.703089 36 +2013-03-01 09:11:58.70309 35 +2013-03-01 09:11:58.703091 38 +2013-03-01 09:11:58.703092 38 +2013-03-01 09:11:58.703093 43 +2013-03-01 09:11:58.703094 44 +2013-03-01 09:11:58.703095 29 +2013-03-01 09:11:58.703096 45 +2013-03-01 09:11:58.703097 37 +2013-03-01 09:11:58.703098 36 +2013-03-01 09:11:58.703099 39 +2013-03-01 09:11:58.7031 34 +2013-03-01 09:11:58.703101 27 +2013-03-01 09:11:58.703102 45 +2013-03-01 09:11:58.703103 38 +2013-03-01 09:11:58.703104 45 +2013-03-01 09:11:58.703105 40 +2013-03-01 09:11:58.703106 43 +2013-03-01 09:11:58.703107 34 +2013-03-01 09:11:58.703108 38 +2013-03-01 09:11:58.703109 35 +2013-03-01 09:11:58.70311 44 +2013-03-01 09:11:58.703111 44 +2013-03-01 09:11:58.703112 45 +2013-03-01 09:11:58.703113 44 +2013-03-01 
09:11:58.703114 24 +2013-03-01 09:11:58.703115 38 +2013-03-01 09:11:58.703116 26 +2013-03-01 09:11:58.703117 39 +2013-03-01 09:11:58.703118 27 +2013-03-01 09:11:58.703119 35 +2013-03-01 09:11:58.70312 43 +2013-03-01 09:11:58.703121 45 +2013-03-01 09:11:58.703122 34 +2013-03-01 09:11:58.703123 36 +2013-03-01 09:11:58.703124 50 +2013-03-01 09:11:58.703125 40 +2013-03-01 09:11:58.703126 36 +2013-03-01 09:11:58.703127 45 +2013-03-01 09:11:58.703128 36 +2013-03-01 09:11:58.703129 42 +2013-03-01 09:11:58.70313 41 +2013-03-01 09:11:58.703131 47 +2013-03-01 09:11:58.703132 42 +2013-03-01 09:11:58.703133 42 +2013-03-01 09:11:58.703134 33 +2013-03-01 09:11:58.703135 43 +2013-03-01 09:11:58.703136 38 +2013-03-01 09:11:58.703137 36 +2013-03-01 09:11:58.703138 43 +2013-03-01 09:11:58.703139 45 +2013-03-01 09:11:58.70314 43 +2013-03-01 09:11:58.703141 41 +2013-03-01 09:11:58.703142 38 +2013-03-01 09:11:58.703143 45 +2013-03-01 09:11:58.703144 41 +2013-03-01 09:11:58.703145 36 +2013-03-01 09:11:58.703146 37 +2013-03-01 09:11:58.703147 34 +2013-03-01 09:11:58.703148 31 +2013-03-01 09:11:58.703149 31 +2013-03-01 09:11:58.70315 42 +2013-03-01 09:11:58.703151 42 +2013-03-01 09:11:58.703152 40 +2013-03-01 09:11:58.703153 26 +2013-03-01 09:11:58.703154 28 +2013-03-01 09:11:58.703155 31 +2013-03-01 09:11:58.703156 47 +2013-03-01 09:11:58.703157 46 +2013-03-01 09:11:58.703158 38 +2013-03-01 09:11:58.703159 33 +2013-03-01 09:11:58.70316 35 +2013-03-01 09:11:58.703161 36 +2013-03-01 09:11:58.703162 42 +2013-03-01 09:11:58.703163 37 +2013-03-01 09:11:58.703164 39 +2013-03-01 09:11:58.703165 33 +2013-03-01 09:11:58.703166 44 +2013-03-01 09:11:58.703167 44 +2013-03-01 09:11:58.703168 40 +2013-03-01 09:11:58.703169 38 +2013-03-01 09:11:58.70317 42 +2013-03-01 09:11:58.703171 43 +2013-03-01 09:11:58.703172 34 +2013-03-01 09:11:58.703173 44 +2013-03-01 09:11:58.703174 35 +2013-03-01 09:11:58.703175 41 +2013-03-01 09:11:58.703176 42 +2013-03-01 09:11:58.703177 43 +2013-03-01 09:11:58.703178 42 +2013-03-01 09:11:58.703179 39 +2013-03-01 09:11:58.70318 46 +2013-03-01 09:11:58.703181 30 +2013-03-01 09:11:58.703182 40 +2013-03-01 09:11:58.703183 38 +2013-03-01 09:11:58.703184 37 +2013-03-01 09:11:58.703185 33 +2013-03-01 09:11:58.703186 40 +2013-03-01 09:11:58.703187 25 +2013-03-01 09:11:58.703188 34 +2013-03-01 09:11:58.703189 39 +2013-03-01 09:11:58.70319 44 +2013-03-01 09:11:58.703191 37 +2013-03-01 09:11:58.703192 46 +2013-03-01 09:11:58.703193 32 +2013-03-01 09:11:58.703194 43 +2013-03-01 09:11:58.703195 35 +2013-03-01 09:11:58.703196 32 +2013-03-01 09:11:58.703197 33 +2013-03-01 09:11:58.703198 44 +2013-03-01 09:11:58.703199 30 +2013-03-01 09:11:58.7032 44 +2013-03-01 09:11:58.703201 32 +2013-03-01 09:11:58.703202 45 +2013-03-01 09:11:58.703203 34 +2013-03-01 09:11:58.703204 39 +2013-03-01 09:11:58.703205 39 +2013-03-01 09:11:58.703206 39 +2013-03-01 09:11:58.703207 40 +2013-03-01 09:11:58.703208 32 +2013-03-01 09:11:58.703209 38 +2013-03-01 09:11:58.70321 39 +2013-03-01 09:11:58.703211 42 +2013-03-01 09:11:58.703212 35 +2013-03-01 09:11:58.703213 41 +2013-03-01 09:11:58.703214 43 +2013-03-01 09:11:58.703215 33 +2013-03-01 09:11:58.703216 32 +2013-03-01 09:11:58.703217 39 +2013-03-01 09:11:58.703218 40 +2013-03-01 09:11:58.703219 46 +2013-03-01 09:11:58.70322 46 +2013-03-01 09:11:58.703221 38 +2013-03-01 09:11:58.703222 39 +2013-03-01 09:11:58.703223 47 +2013-03-01 09:11:58.703224 38 +2013-03-01 09:11:58.703225 32 +2013-03-01 09:11:58.703226 46 +2013-03-01 09:11:58.703227 49 +2013-03-01 09:11:58.703228 38 +2013-03-01 
09:11:58.703229 32 +2013-03-01 09:11:58.70323 43 +2013-03-01 09:11:58.703231 45 +2013-03-01 09:11:58.703232 34 +2013-03-01 09:11:58.703233 51 +2013-03-01 09:11:58.703234 43 +2013-03-01 09:11:58.703235 40 +2013-03-01 09:11:58.703236 41 +2013-03-01 09:11:58.703237 37 +2013-03-01 09:11:58.703238 35 +2013-03-01 09:11:58.703239 46 +2013-03-01 09:11:58.70324 28 +2013-03-01 09:11:58.703241 46 +2013-03-01 09:11:58.703242 50 +2013-03-01 09:11:58.703243 46 +2013-03-01 09:11:58.703244 32 +2013-03-01 09:11:58.703245 42 +2013-03-01 09:11:58.703246 41 +2013-03-01 09:11:58.703247 33 +2013-03-01 09:11:58.703248 45 +2013-03-01 09:11:58.703249 37 +2013-03-01 09:11:58.70325 35 +2013-03-01 09:11:58.703251 35 +2013-03-01 09:11:58.703252 39 +2013-03-01 09:11:58.703253 24 +2013-03-01 09:11:58.703254 40 +2013-03-01 09:11:58.703255 32 +2013-03-01 09:11:58.703256 42 +2013-03-01 09:11:58.703257 49 +2013-03-01 09:11:58.703258 41 +2013-03-01 09:11:58.703259 33 +2013-03-01 09:11:58.70326 42 +2013-03-01 09:11:58.703261 33 +2013-03-01 09:11:58.703262 43 +2013-03-01 09:11:58.703263 42 +2013-03-01 09:11:58.703264 32 +2013-03-01 09:11:58.703265 49 +2013-03-01 09:11:58.703266 39 +2013-03-01 09:11:58.703267 41 +2013-03-01 09:11:58.703268 44 +2013-03-01 09:11:58.703269 40 +2013-03-01 09:11:58.70327 37 +2013-03-01 09:11:58.703271 39 +2013-03-01 09:11:58.703272 47 +2013-03-01 09:11:58.703273 44 +2013-03-01 09:11:58.703274 36 +2013-03-01 09:11:58.703275 41 +2013-03-01 09:11:58.703276 42 +2013-03-01 09:11:58.703277 46 +2013-03-01 09:11:58.703278 49 +2013-03-01 09:11:58.703279 36 +2013-03-01 09:11:58.70328 40 +2013-03-01 09:11:58.703281 38 +2013-03-01 09:11:58.703282 40 +2013-03-01 09:11:58.703283 51 +2013-03-01 09:11:58.703284 29 +2013-03-01 09:11:58.703285 28 +2013-03-01 09:11:58.703286 44 +2013-03-01 09:11:58.703287 55 +2013-03-01 09:11:58.703288 41 +2013-03-01 09:11:58.703289 42 +2013-03-01 09:11:58.70329 41 +2013-03-01 09:11:58.703291 40 +2013-03-01 09:11:58.703292 41 +2013-03-01 09:11:58.703293 29 +2013-03-01 09:11:58.703294 38 +2013-03-01 09:11:58.703295 38 +2013-03-01 09:11:58.703296 41 +2013-03-01 09:11:58.703297 41 +2013-03-01 09:11:58.703298 34 +2013-03-01 09:11:58.703299 46 +2013-03-01 09:11:58.7033 40 +2013-03-01 09:11:58.703301 35 +2013-03-01 09:11:58.703302 35 +2013-03-01 09:11:58.703303 41 +2013-03-01 09:11:58.703304 32 +2013-03-01 09:11:58.703305 51 +2013-03-01 09:11:58.703306 38 +2013-03-01 09:11:58.703307 38 +2013-03-01 09:11:58.703308 40 +2013-03-01 09:11:58.703309 31 +2013-03-01 09:11:58.70331 48 +2013-03-01 09:11:58.703311 33 +2013-03-01 09:11:58.703312 46 +2013-03-01 09:11:58.703313 43 +2013-03-01 09:11:58.703314 37 +2013-03-01 09:11:58.703315 36 +2013-03-01 09:11:58.703316 42 +2013-03-01 09:11:58.703317 47 +2013-03-01 09:11:58.703318 39 +2013-03-01 09:11:58.703319 45 +2013-03-01 09:11:58.70332 45 +2013-03-01 09:11:58.703321 37 +2013-03-01 09:11:58.703322 35 +2013-03-01 09:11:58.703323 45 +2013-03-01 09:11:58.703324 35 +2013-03-01 09:11:58.703325 39 +PREHOOK: query: explain vectorization expression +select ts, count(*) from over10k group by ts +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization expression +select ts, count(*) from over10k group by ts +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: over10k + Statistics: Num rows: 1 Data size: 10175440 
Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Select Operator + expressions: ts (type: timestamp) + outputColumnNames: ts + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [8] + Statistics: Num rows: 1 Data size: 10175440 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: count() + Group By Vectorization: + aggregators: VectorUDAFCountStar(*) -> bigint + className: VectorGroupByOperator + groupByMode: HASH + keyExpressions: col 8:timestamp + native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false + vectorProcessingMode: HASH + projectedOutputColumnNums: [0] + keys: ts (type: timestamp) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 10175440 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: timestamp) + sort order: + + Map-reduce partition columns: _col0 (type: timestamp) + Reduce Sink Vectorization: + className: VectorReduceSinkOperator + native: false + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false + Statistics: Num rows: 1 Data size: 10175440 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: bigint) + Execution mode: vectorized + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Vectorization: + enabled: false + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true + enableConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false + Reduce Operator Tree: + Group By Operator + aggregations: count(VALUE._col0) + keys: KEY._col0 (type: timestamp) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 10175440 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 10175440 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select ts, count(*) from over10k group by ts +PREHOOK: type: QUERY +PREHOOK: Input: default@over10k +#### A masked pattern was here #### +POSTHOOK: query: select ts, count(*) from over10k group by ts +POSTHOOK: type: QUERY +POSTHOOK: Input: default@over10k +#### A masked pattern was here #### +2013-03-01 09:11:58.70307 26 +2013-03-01 09:11:58.703071 50 +2013-03-01 09:11:58.703072 32 +2013-03-01 09:11:58.703073 42 +2013-03-01 09:11:58.703074 45 +2013-03-01 09:11:58.703075 38 +2013-03-01 09:11:58.703076 45 +2013-03-01 09:11:58.703077 50 +2013-03-01 09:11:58.703078 24 +2013-03-01 
09:11:58.703079 43 +2013-03-01 09:11:58.70308 25 +2013-03-01 09:11:58.703081 36 +2013-03-01 09:11:58.703082 38 +2013-03-01 09:11:58.703083 42 +2013-03-01 09:11:58.703084 44 +2013-03-01 09:11:58.703085 33 +2013-03-01 09:11:58.703086 40 +2013-03-01 09:11:58.703087 39 +2013-03-01 09:11:58.703088 30 +2013-03-01 09:11:58.703089 36 +2013-03-01 09:11:58.70309 35 +2013-03-01 09:11:58.703091 38 +2013-03-01 09:11:58.703092 38 +2013-03-01 09:11:58.703093 43 +2013-03-01 09:11:58.703094 44 +2013-03-01 09:11:58.703095 29 +2013-03-01 09:11:58.703096 45 +2013-03-01 09:11:58.703097 37 +2013-03-01 09:11:58.703098 36 +2013-03-01 09:11:58.703099 39 +2013-03-01 09:11:58.7031 34 +2013-03-01 09:11:58.703101 27 +2013-03-01 09:11:58.703102 45 +2013-03-01 09:11:58.703103 38 +2013-03-01 09:11:58.703104 45 +2013-03-01 09:11:58.703105 40 +2013-03-01 09:11:58.703106 43 +2013-03-01 09:11:58.703107 34 +2013-03-01 09:11:58.703108 38 +2013-03-01 09:11:58.703109 35 +2013-03-01 09:11:58.70311 44 +2013-03-01 09:11:58.703111 44 +2013-03-01 09:11:58.703112 45 +2013-03-01 09:11:58.703113 44 +2013-03-01 09:11:58.703114 24 +2013-03-01 09:11:58.703115 38 +2013-03-01 09:11:58.703116 26 +2013-03-01 09:11:58.703117 39 +2013-03-01 09:11:58.703118 27 +2013-03-01 09:11:58.703119 35 +2013-03-01 09:11:58.70312 43 +2013-03-01 09:11:58.703121 45 +2013-03-01 09:11:58.703122 34 +2013-03-01 09:11:58.703123 36 +2013-03-01 09:11:58.703124 50 +2013-03-01 09:11:58.703125 40 +2013-03-01 09:11:58.703126 36 +2013-03-01 09:11:58.703127 45 +2013-03-01 09:11:58.703128 36 +2013-03-01 09:11:58.703129 42 +2013-03-01 09:11:58.70313 41 +2013-03-01 09:11:58.703131 47 +2013-03-01 09:11:58.703132 42 +2013-03-01 09:11:58.703133 42 +2013-03-01 09:11:58.703134 33 +2013-03-01 09:11:58.703135 43 +2013-03-01 09:11:58.703136 38 +2013-03-01 09:11:58.703137 36 +2013-03-01 09:11:58.703138 43 +2013-03-01 09:11:58.703139 45 +2013-03-01 09:11:58.70314 43 +2013-03-01 09:11:58.703141 41 +2013-03-01 09:11:58.703142 38 +2013-03-01 09:11:58.703143 45 +2013-03-01 09:11:58.703144 41 +2013-03-01 09:11:58.703145 36 +2013-03-01 09:11:58.703146 37 +2013-03-01 09:11:58.703147 34 +2013-03-01 09:11:58.703148 31 +2013-03-01 09:11:58.703149 31 +2013-03-01 09:11:58.70315 42 +2013-03-01 09:11:58.703151 42 +2013-03-01 09:11:58.703152 40 +2013-03-01 09:11:58.703153 26 +2013-03-01 09:11:58.703154 28 +2013-03-01 09:11:58.703155 31 +2013-03-01 09:11:58.703156 47 +2013-03-01 09:11:58.703157 46 +2013-03-01 09:11:58.703158 38 +2013-03-01 09:11:58.703159 33 +2013-03-01 09:11:58.70316 35 +2013-03-01 09:11:58.703161 36 +2013-03-01 09:11:58.703162 42 +2013-03-01 09:11:58.703163 37 +2013-03-01 09:11:58.703164 39 +2013-03-01 09:11:58.703165 33 +2013-03-01 09:11:58.703166 44 +2013-03-01 09:11:58.703167 44 +2013-03-01 09:11:58.703168 40 +2013-03-01 09:11:58.703169 38 +2013-03-01 09:11:58.70317 42 +2013-03-01 09:11:58.703171 43 +2013-03-01 09:11:58.703172 34 +2013-03-01 09:11:58.703173 44 +2013-03-01 09:11:58.703174 35 +2013-03-01 09:11:58.703175 41 +2013-03-01 09:11:58.703176 42 +2013-03-01 09:11:58.703177 43 +2013-03-01 09:11:58.703178 42 +2013-03-01 09:11:58.703179 39 +2013-03-01 09:11:58.70318 46 +2013-03-01 09:11:58.703181 30 +2013-03-01 09:11:58.703182 40 +2013-03-01 09:11:58.703183 38 +2013-03-01 09:11:58.703184 37 +2013-03-01 09:11:58.703185 33 +2013-03-01 09:11:58.703186 40 +2013-03-01 09:11:58.703187 25 +2013-03-01 09:11:58.703188 34 +2013-03-01 09:11:58.703189 39 +2013-03-01 09:11:58.70319 44 +2013-03-01 09:11:58.703191 37 +2013-03-01 09:11:58.703192 46 +2013-03-01 09:11:58.703193 32 +2013-03-01 
09:11:58.703194 43 +2013-03-01 09:11:58.703195 35 +2013-03-01 09:11:58.703196 32 +2013-03-01 09:11:58.703197 33 +2013-03-01 09:11:58.703198 44 +2013-03-01 09:11:58.703199 30 +2013-03-01 09:11:58.7032 44 +2013-03-01 09:11:58.703201 32 +2013-03-01 09:11:58.703202 45 +2013-03-01 09:11:58.703203 34 +2013-03-01 09:11:58.703204 39 +2013-03-01 09:11:58.703205 39 +2013-03-01 09:11:58.703206 39 +2013-03-01 09:11:58.703207 40 +2013-03-01 09:11:58.703208 32 +2013-03-01 09:11:58.703209 38 +2013-03-01 09:11:58.70321 39 +2013-03-01 09:11:58.703211 42 +2013-03-01 09:11:58.703212 35 +2013-03-01 09:11:58.703213 41 +2013-03-01 09:11:58.703214 43 +2013-03-01 09:11:58.703215 33 +2013-03-01 09:11:58.703216 32 +2013-03-01 09:11:58.703217 39 +2013-03-01 09:11:58.703218 40 +2013-03-01 09:11:58.703219 46 +2013-03-01 09:11:58.70322 46 +2013-03-01 09:11:58.703221 38 +2013-03-01 09:11:58.703222 39 +2013-03-01 09:11:58.703223 47 +2013-03-01 09:11:58.703224 38 +2013-03-01 09:11:58.703225 32 +2013-03-01 09:11:58.703226 46 +2013-03-01 09:11:58.703227 49 +2013-03-01 09:11:58.703228 38 +2013-03-01 09:11:58.703229 32 +2013-03-01 09:11:58.70323 43 +2013-03-01 09:11:58.703231 45 +2013-03-01 09:11:58.703232 34 +2013-03-01 09:11:58.703233 51 +2013-03-01 09:11:58.703234 43 +2013-03-01 09:11:58.703235 40 +2013-03-01 09:11:58.703236 41 +2013-03-01 09:11:58.703237 37 +2013-03-01 09:11:58.703238 35 +2013-03-01 09:11:58.703239 46 +2013-03-01 09:11:58.70324 28 +2013-03-01 09:11:58.703241 46 +2013-03-01 09:11:58.703242 50 +2013-03-01 09:11:58.703243 46 +2013-03-01 09:11:58.703244 32 +2013-03-01 09:11:58.703245 42 +2013-03-01 09:11:58.703246 41 +2013-03-01 09:11:58.703247 33 +2013-03-01 09:11:58.703248 45 +2013-03-01 09:11:58.703249 37 +2013-03-01 09:11:58.70325 35 +2013-03-01 09:11:58.703251 35 +2013-03-01 09:11:58.703252 39 +2013-03-01 09:11:58.703253 24 +2013-03-01 09:11:58.703254 40 +2013-03-01 09:11:58.703255 32 +2013-03-01 09:11:58.703256 42 +2013-03-01 09:11:58.703257 49 +2013-03-01 09:11:58.703258 41 +2013-03-01 09:11:58.703259 33 +2013-03-01 09:11:58.70326 42 +2013-03-01 09:11:58.703261 33 +2013-03-01 09:11:58.703262 43 +2013-03-01 09:11:58.703263 42 +2013-03-01 09:11:58.703264 32 +2013-03-01 09:11:58.703265 49 +2013-03-01 09:11:58.703266 39 +2013-03-01 09:11:58.703267 41 +2013-03-01 09:11:58.703268 44 +2013-03-01 09:11:58.703269 40 +2013-03-01 09:11:58.70327 37 +2013-03-01 09:11:58.703271 39 +2013-03-01 09:11:58.703272 47 +2013-03-01 09:11:58.703273 44 +2013-03-01 09:11:58.703274 36 +2013-03-01 09:11:58.703275 41 +2013-03-01 09:11:58.703276 42 +2013-03-01 09:11:58.703277 46 +2013-03-01 09:11:58.703278 49 +2013-03-01 09:11:58.703279 36 +2013-03-01 09:11:58.70328 40 +2013-03-01 09:11:58.703281 38 +2013-03-01 09:11:58.703282 40 +2013-03-01 09:11:58.703283 51 +2013-03-01 09:11:58.703284 29 +2013-03-01 09:11:58.703285 28 +2013-03-01 09:11:58.703286 44 +2013-03-01 09:11:58.703287 55 +2013-03-01 09:11:58.703288 41 +2013-03-01 09:11:58.703289 42 +2013-03-01 09:11:58.70329 41 +2013-03-01 09:11:58.703291 40 +2013-03-01 09:11:58.703292 41 +2013-03-01 09:11:58.703293 29 +2013-03-01 09:11:58.703294 38 +2013-03-01 09:11:58.703295 38 +2013-03-01 09:11:58.703296 41 +2013-03-01 09:11:58.703297 41 +2013-03-01 09:11:58.703298 34 +2013-03-01 09:11:58.703299 46 +2013-03-01 09:11:58.7033 40 +2013-03-01 09:11:58.703301 35 +2013-03-01 09:11:58.703302 35 +2013-03-01 09:11:58.703303 41 +2013-03-01 09:11:58.703304 32 +2013-03-01 09:11:58.703305 51 +2013-03-01 09:11:58.703306 38 +2013-03-01 09:11:58.703307 38 +2013-03-01 09:11:58.703308 40 +2013-03-01 
09:11:58.703309 31 +2013-03-01 09:11:58.70331 48 +2013-03-01 09:11:58.703311 33 +2013-03-01 09:11:58.703312 46 +2013-03-01 09:11:58.703313 43 +2013-03-01 09:11:58.703314 37 +2013-03-01 09:11:58.703315 36 +2013-03-01 09:11:58.703316 42 +2013-03-01 09:11:58.703317 47 +2013-03-01 09:11:58.703318 39 +2013-03-01 09:11:58.703319 45 +2013-03-01 09:11:58.70332 45 +2013-03-01 09:11:58.703321 37 +2013-03-01 09:11:58.703322 35 +2013-03-01 09:11:58.703323 45 +2013-03-01 09:11:58.703324 35 +2013-03-01 09:11:58.703325 39 +PREHOOK: query: explain vectorization expression +select `dec`, count(`dec`) from over10k group by `dec` +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization expression +select `dec`, count(`dec`) from over10k group by `dec` +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: over10k + Statistics: Num rows: 1 Data size: 10175440 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Select Operator + expressions: dec (type: decimal(4,2)) + outputColumnNames: dec + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [9] + Statistics: Num rows: 1 Data size: 10175440 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: count(dec) + Group By Vectorization: + aggregators: VectorUDAFCount(col 9:decimal(4,2)/DECIMAL_64) -> bigint + className: VectorGroupByOperator + groupByMode: HASH + keyExpressions: ConvertDecimal64ToDecimal(col 9:decimal(4,2)/DECIMAL_64) -> 12:decimal(4,2) + native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false + vectorProcessingMode: HASH + projectedOutputColumnNums: [0] + keys: dec (type: decimal(4,2)) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 10175440 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: decimal(4,2)) + sort order: + + Map-reduce partition columns: _col0 (type: decimal(4,2)) + Reduce Sink Vectorization: + className: VectorReduceSinkOperator + native: false + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false + Statistics: Num rows: 1 Data size: 10175440 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: bigint) + Execution mode: vectorized + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Vectorization: + enabled: false + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true + enableConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false + Reduce Operator Tree: + Group By Operator + aggregations: count(VALUE._col0) + 
keys: KEY._col0 (type: decimal(4,2)) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 10175440 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 10175440 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select `dec`, count(`dec`) from over10k group by `dec` +PREHOOK: type: QUERY +PREHOOK: Input: default@over10k +#### A masked pattern was here #### +POSTHOOK: query: select `dec`, count(`dec`) from over10k group by `dec` +POSTHOOK: type: QUERY +POSTHOOK: Input: default@over10k +#### A masked pattern was here #### +0.01 2 +0.02 1 +0.03 2 +0.04 1 +0.05 1 +0.06 3 +0.07 1 +0.08 3 +0.10 1 +0.11 1 +0.15 1 +0.17 1 +0.21 2 +0.22 1 +0.23 2 +0.24 1 +0.25 2 +0.26 1 +0.27 1 +0.29 1 +0.30 2 +0.31 3 +0.32 1 +0.33 1 +0.34 3 +0.35 2 +0.36 1 +0.37 2 +0.38 3 +0.39 1 +0.40 2 +0.42 1 +0.45 2 +0.46 3 +0.47 1 +0.50 1 +0.51 1 +0.55 1 +0.56 4 +0.57 1 +0.58 1 +0.64 3 +0.66 1 +0.67 1 +0.68 1 +0.70 1 +0.72 1 +0.73 1 +0.75 1 +0.77 1 +0.79 1 +0.80 2 +0.81 1 +0.83 1 +0.84 1 +0.85 1 +0.86 1 +0.87 1 +0.88 1 +0.89 1 +0.90 1 +0.91 2 +0.92 2 +0.93 1 +0.94 1 +0.96 1 +0.97 4 +0.98 2 +0.99 2 +1.00 2 +1.01 2 +1.04 1 +1.05 1 +1.07 2 +1.10 2 +1.11 1 +1.12 1 +1.13 1 +1.15 1 +1.16 1 +1.17 1 +1.19 1 +1.20 2 +1.22 2 +1.23 2 +1.24 2 +1.25 3 +1.27 1 +1.29 3 +1.31 1 +1.32 1 +1.33 1 +1.34 2 +1.35 2 +1.36 2 +1.37 2 +1.38 2 +1.39 1 +1.40 2 +1.42 2 +1.43 3 +1.45 2 +1.46 1 +1.48 1 +1.49 3 +1.50 2 +1.52 1 +1.55 2 +1.56 2 +1.59 1 +1.62 1 +1.65 1 +1.66 1 +1.67 1 +1.68 2 +1.69 1 +1.70 1 +1.72 4 +1.74 1 +1.75 2 +1.76 2 +1.77 1 +1.78 1 +1.79 1 +1.81 2 +1.82 1 +1.85 1 +1.86 1 +1.89 1 +1.94 1 +1.97 3 +1.99 1 +10.00 2 +10.03 2 +10.04 1 +10.06 1 +10.07 1 +10.08 2 +10.11 3 +10.12 1 +10.14 2 +10.15 2 +10.16 4 +10.20 2 +10.22 1 +10.28 1 +10.29 1 +10.30 1 +10.32 4 +10.33 1 +10.34 1 +10.35 3 +10.36 2 +10.37 1 +10.38 1 +10.39 1 +10.40 3 +10.41 1 +10.42 2 +10.43 1 +10.44 1 +10.45 1 +10.47 2 +10.48 2 +10.50 1 +10.51 1 +10.52 4 +10.54 1 +10.56 2 +10.59 2 +10.60 1 +10.62 1 +10.63 3 +10.65 1 +10.66 1 +10.67 2 +10.69 1 +10.70 2 +10.71 1 +10.72 2 +10.74 3 +10.75 1 +10.76 2 +10.77 2 +10.78 3 +10.79 1 +10.80 1 +10.81 1 +10.82 1 +10.83 1 +10.86 4 +10.88 3 +10.90 1 +10.91 2 +10.93 2 +10.94 2 +10.95 1 +10.98 1 +10.99 2 +11.00 1 +11.02 2 +11.04 1 +11.06 2 +11.10 1 +11.11 1 +11.12 2 +11.14 1 +11.15 3 +11.16 3 +11.17 1 +11.19 1 +11.20 4 +11.21 3 +11.23 2 +11.24 2 +11.25 1 +11.26 1 +11.27 1 +11.28 2 +11.29 1 +11.31 2 +11.32 2 +11.34 2 +11.36 1 +11.37 6 +11.38 2 +11.40 3 +11.41 1 +11.43 2 +11.44 1 +11.45 2 +11.46 1 +11.54 1 +11.56 2 +11.58 1 +11.61 1 +11.62 2 +11.63 2 +11.64 1 +11.65 3 +11.66 2 +11.67 1 +11.69 4 +11.70 2 +11.71 2 +11.73 1 +11.74 1 +11.76 1 +11.77 1 +11.78 3 +11.80 1 +11.81 3 +11.82 1 +11.83 1 +11.86 1 +11.87 5 +11.90 2 +11.91 2 +11.93 3 +11.95 1 +11.99 2 +12.03 2 +12.04 1 +12.05 1 +12.07 3 +12.08 2 +12.10 1 +12.11 2 +12.12 1 +12.13 1 +12.14 1 +12.15 1 +12.18 1 +12.22 1 +12.23 1 +12.26 1 +12.27 1 +12.30 2 +12.31 1 +12.34 1 +12.36 2 +12.38 1 +12.39 1 +12.40 1 +12.41 1 +12.43 1 +12.44 1 +12.45 1 +12.47 1 +12.48 2 +12.50 1 +12.51 1 +12.53 2 +12.54 1 +12.55 2 +12.56 1 +12.58 1 +12.59 2 +12.60 1 +12.61 3 +12.63 1 +12.65 2 +12.66 1 +12.67 3 +12.71 1 +12.72 3 
+12.73 1 +12.75 1 +12.76 3 +12.77 3 +12.78 2 +12.79 1 +12.81 1 +12.83 1 +12.86 1 +12.87 1 +12.92 5 +12.95 1 +12.98 1 +12.99 2 +13.01 1 +13.03 1 +13.04 4 +13.05 1 +13.06 2 +13.07 2 +13.08 2 +13.09 2 +13.10 2 +13.11 1 +13.12 1 +13.13 1 +13.15 2 +13.16 2 +13.18 1 +13.19 1 +13.20 2 +13.21 1 +13.23 1 +13.25 2 +13.26 1 +13.27 1 +13.28 1 +13.29 1 +13.30 1 +13.32 2 +13.33 1 +13.34 1 +13.35 1 +13.36 1 +13.38 2 +13.40 1 +13.41 1 +13.43 1 +13.44 1 +13.45 1 +13.47 1 +13.49 2 +13.51 1 +13.56 2 +13.58 1 +13.59 1 +13.60 1 +13.61 1 +13.68 1 +13.70 1 +13.71 1 +13.72 1 +13.75 1 +13.77 1 +13.78 2 +13.80 1 +13.81 3 +13.82 1 +13.85 1 +13.86 2 +13.88 1 +13.89 2 +13.90 1 +13.91 2 +13.92 2 +13.93 1 +13.94 2 +13.95 1 +13.96 2 +13.97 3 +14.00 2 +14.01 3 +14.03 1 +14.04 2 +14.07 1 +14.08 1 +14.09 2 +14.10 2 +14.12 2 +14.14 3 +14.16 2 +14.17 1 +14.18 1 +14.19 1 +14.21 2 +14.23 2 +14.24 2 +14.26 1 +14.27 1 +14.28 3 +14.32 3 +14.33 2 +14.35 5 +14.36 1 +14.38 1 +14.39 2 +14.40 3 +14.41 1 +14.42 2 +14.46 1 +14.47 1 +14.48 1 +14.49 2 +14.52 1 +14.54 1 +14.55 3 +14.56 1 +14.57 1 +14.58 1 +14.59 1 +14.63 1 +14.64 1 +14.65 1 +14.66 2 +14.68 2 +14.69 4 +14.71 1 +14.73 1 +14.74 2 +14.75 1 +14.76 1 +14.78 2 +14.80 1 +14.81 1 +14.83 2 +14.85 1 +14.88 2 +14.89 1 +14.91 3 +14.92 1 +14.94 3 +14.96 1 +15.01 1 +15.02 1 +15.07 1 +15.09 1 +15.13 1 +15.14 2 +15.15 1 +15.16 1 +15.17 1 +15.19 2 +15.20 2 +15.22 5 +15.23 2 +15.26 2 +15.27 1 +15.28 2 +15.29 1 +15.30 2 +15.31 1 +15.32 1 +15.35 1 +15.36 1 +15.37 1 +15.38 2 +15.40 1 +15.42 3 +15.43 1 +15.46 1 +15.47 2 +15.50 1 +15.52 1 +15.54 3 +15.55 2 +15.57 1 +15.58 1 +15.60 3 +15.63 1 +15.65 2 +15.67 2 +15.69 2 +15.70 4 +15.75 1 +15.76 1 +15.79 1 +15.80 1 +15.81 2 +15.82 2 +15.83 2 +15.85 1 +15.87 2 +15.88 1 +15.89 2 +15.90 2 +15.96 1 +15.98 2 +15.99 2 +16.00 2 +16.01 1 +16.03 1 +16.05 2 +16.06 3 +16.09 1 +16.10 1 +16.11 1 +16.13 1 +16.14 1 +16.15 3 +16.17 2 +16.18 2 +16.19 1 +16.21 1 +16.22 1 +16.23 1 +16.24 1 +16.25 1 +16.26 3 +16.27 3 +16.29 1 +16.32 1 +16.34 1 +16.35 1 +16.38 1 +16.39 3 +16.40 1 +16.41 1 +16.43 1 +16.48 1 +16.49 1 +16.51 1 +16.53 1 +16.54 1 +16.55 1 +16.57 2 +16.58 1 +16.59 3 +16.60 2 +16.61 1 +16.63 2 +16.66 1 +16.67 1 +16.68 2 +16.70 1 +16.72 1 +16.74 1 +16.75 1 +16.76 1 +16.77 1 +16.79 1 +16.81 1 +16.84 1 +16.85 2 +16.86 2 +16.87 5 +16.89 2 +16.91 1 +16.92 1 +16.93 2 +16.94 1 +16.95 1 +17.00 1 +17.01 1 +17.02 1 +17.03 1 +17.05 2 +17.06 1 +17.07 2 +17.08 1 +17.09 4 +17.12 1 +17.13 2 +17.15 1 +17.17 3 +17.19 2 +17.20 1 +17.21 1 +17.23 3 +17.25 1 +17.26 1 +17.27 3 +17.29 2 +17.31 1 +17.33 1 +17.35 1 +17.38 1 +17.39 1 +17.41 1 +17.42 2 +17.45 1 +17.47 1 +17.48 1 +17.49 1 +17.50 1 +17.52 2 +17.53 1 +17.57 1 +17.58 2 +17.59 1 +17.60 1 +17.61 1 +17.62 1 +17.64 2 +17.65 1 +17.66 1 +17.67 1 +17.68 4 +17.71 1 +17.72 1 +17.73 1 +17.74 1 +17.75 2 +17.77 2 +17.78 1 +17.79 2 +17.83 1 +17.84 1 +17.86 1 +17.87 1 +17.89 1 +17.92 2 +17.93 1 +17.95 1 +17.98 1 +18.00 2 +18.01 1 +18.02 3 +18.03 2 +18.04 1 +18.05 1 +18.06 1 +18.08 4 +18.10 1 +18.11 3 +18.12 4 +18.13 1 +18.14 5 +18.15 2 +18.16 1 +18.17 1 +18.18 1 +18.19 1 +18.20 3 +18.21 3 +18.23 2 +18.24 1 +18.25 1 +18.27 1 +18.28 2 +18.30 2 +18.31 1 +18.32 1 +18.35 2 +18.36 1 +18.37 2 +18.38 2 +18.39 1 +18.42 2 +18.43 1 +18.46 1 +18.47 1 +18.49 2 +18.50 1 +18.51 2 +18.52 1 +18.55 1 +18.56 1 +18.57 2 +18.64 1 +18.65 1 +18.66 1 +18.67 1 +18.68 4 +18.69 3 +18.70 2 +18.73 1 +18.74 2 +18.75 1 +18.76 1 +18.77 1 +18.78 1 +18.79 1 +18.80 2 +18.83 1 +18.85 2 +18.86 2 +18.88 3 +18.89 1 +18.90 1 +18.94 1 +18.96 1 +18.98 1 +19.00 1 +19.01 2 +19.02 1 
+19.03 1 +19.04 2 +19.06 1 +19.07 2 +19.08 2 +19.10 2 +19.11 3 +19.13 2 +19.14 4 +19.15 1 +19.16 1 +19.17 2 +19.18 2 +19.19 2 +19.21 1 +19.24 2 +19.26 2 +19.27 1 +19.28 2 +19.30 4 +19.31 1 +19.32 1 +19.33 1 +19.34 1 +19.37 1 +19.42 2 +19.43 1 +19.44 2 +19.45 1 +19.46 2 +19.47 2 +19.51 1 +19.53 2 +19.54 1 +19.55 3 +19.57 1 +19.58 2 +19.60 1 +19.61 3 +19.62 1 +19.63 2 +19.64 3 +19.65 1 +19.68 3 +19.69 1 +19.71 1 +19.72 1 +19.73 1 +19.77 2 +19.78 3 +19.79 1 +19.81 1 +19.82 1 +19.83 1 +19.84 4 +19.85 1 +19.87 3 +19.88 2 +19.89 1 +19.90 1 +19.91 2 +19.93 3 +19.95 3 +19.96 1 +19.97 1 +19.99 1 +2.03 1 +2.04 2 +2.06 2 +2.07 3 +2.08 1 +2.10 2 +2.11 1 +2.14 1 +2.15 3 +2.17 1 +2.19 1 +2.20 1 +2.21 1 +2.22 1 +2.25 1 +2.26 2 +2.29 2 +2.32 1 +2.33 1 +2.35 2 +2.36 2 +2.37 2 +2.41 1 +2.45 1 +2.46 1 +2.48 3 +2.51 3 +2.52 1 +2.54 1 +2.55 1 +2.56 2 +2.57 1 +2.61 2 +2.62 1 +2.63 2 +2.64 1 +2.66 2 +2.68 3 +2.70 2 +2.72 3 +2.75 2 +2.76 1 +2.77 2 +2.80 2 +2.81 1 +2.82 5 +2.83 2 +2.84 2 +2.86 1 +2.87 2 +2.89 1 +2.90 1 +2.92 1 +2.93 1 +2.95 1 +2.96 2 +2.97 2 +2.98 2 +2.99 1 +20.00 1 +20.02 1 +20.06 1 +20.08 1 +20.09 1 +20.11 1 +20.12 1 +20.13 1 +20.14 2 +20.15 1 +20.18 1 +20.21 1 +20.23 2 +20.24 1 +20.29 1 +20.30 1 +20.31 2 +20.32 1 +20.33 1 +20.34 1 +20.35 3 +20.37 1 +20.39 2 +20.40 1 +20.43 2 +20.44 3 +20.46 1 +20.50 1 +20.51 3 +20.52 1 +20.53 1 +20.54 1 +20.56 1 +20.57 1 +20.60 1 +20.61 3 +20.63 1 +20.65 3 +20.67 1 +20.68 2 +20.69 3 +20.72 1 +20.80 1 +20.81 1 +20.84 1 +20.85 2 +20.88 2 +20.90 2 +20.91 1 +20.93 1 +20.94 3 +20.95 3 +20.97 3 +20.98 1 +20.99 1 +21.01 1 +21.02 1 +21.04 1 +21.06 1 +21.08 1 +21.09 1 +21.10 1 +21.11 2 +21.13 1 +21.14 2 +21.15 4 +21.19 2 +21.21 1 +21.23 1 +21.24 1 +21.25 1 +21.26 2 +21.27 1 +21.28 3 +21.29 4 +21.34 1 +21.35 1 +21.36 2 +21.37 1 +21.39 1 +21.40 1 +21.41 2 +21.42 1 +21.43 3 +21.44 2 +21.45 2 +21.46 1 +21.48 1 +21.49 3 +21.50 2 +21.55 4 +21.56 2 +21.59 4 +21.60 3 +21.61 2 +21.62 2 +21.63 1 +21.65 1 +21.66 2 +21.67 1 +21.68 1 +21.69 2 +21.70 3 +21.71 1 +21.72 1 +21.73 1 +21.75 1 +21.76 1 +21.78 1 +21.79 5 +21.80 1 +21.81 4 +21.83 1 +21.84 3 +21.89 2 +21.92 2 +21.93 3 +21.94 2 +21.96 2 +21.97 1 +21.98 1 +21.99 1 +22.00 1 +22.01 1 +22.02 2 +22.03 3 +22.04 1 +22.05 1 +22.07 2 +22.08 1 +22.11 1 +22.13 2 +22.14 2 +22.15 2 +22.16 3 +22.18 1 +22.19 1 +22.22 1 +22.23 1 +22.24 1 +22.25 1 +22.30 1 +22.32 1 +22.34 3 +22.35 1 +22.38 1 +22.40 1 +22.44 3 +22.45 1 +22.47 1 +22.48 1 +22.49 1 +22.53 1 +22.59 3 +22.60 2 +22.62 2 +22.64 1 +22.66 4 +22.67 1 +22.68 3 +22.69 1 +22.70 1 +22.71 2 +22.72 3 +22.73 3 +22.76 2 +22.77 2 +22.80 1 +22.81 1 +22.85 1 +22.86 1 +22.88 1 +22.93 1 +22.94 2 +22.95 1 +22.99 1 +23.01 2 +23.03 1 +23.05 1 +23.06 1 +23.07 3 +23.08 1 +23.09 3 +23.11 1 +23.14 2 +23.17 1 +23.18 1 +23.19 2 +23.20 2 +23.21 1 +23.22 1 +23.23 1 +23.24 1 +23.28 1 +23.32 1 +23.33 1 +23.34 2 +23.35 2 +23.37 1 +23.38 1 +23.40 1 +23.41 1 +23.42 2 +23.43 1 +23.44 2 +23.45 2 +23.46 1 +23.47 1 +23.48 1 +23.51 1 +23.53 4 +23.55 4 +23.57 2 +23.58 1 +23.59 1 +23.60 2 +23.61 2 +23.63 3 +23.65 1 +23.67 3 +23.68 1 +23.72 1 +23.73 1 +23.74 1 +23.76 2 +23.78 2 +23.79 2 +23.82 2 +23.84 3 +23.85 2 +23.89 1 +23.90 1 +23.91 3 +23.93 2 +23.94 2 +23.95 2 +23.99 2 +24.02 1 +24.03 1 +24.04 4 +24.05 1 +24.06 1 +24.07 1 +24.08 1 +24.10 1 +24.11 1 +24.14 2 +24.15 1 +24.16 1 +24.19 2 +24.20 2 +24.21 1 +24.23 1 +24.24 1 +24.25 1 +24.26 2 +24.30 2 +24.32 1 +24.33 1 +24.34 1 +24.36 3 +24.37 1 +24.38 1 +24.39 1 +24.40 2 +24.44 1 +24.45 1 +24.47 1 +24.48 4 +24.49 2 +24.53 1 +24.54 1 +24.55 1 +24.56 1 +24.60 1 +24.61 1 
+24.62 2 +24.63 1 +24.64 1 +24.65 2 +24.68 1 +24.69 2 +24.70 2 +24.71 1 +24.73 1 +24.76 1 +24.78 1 +24.79 1 +24.80 1 +24.82 1 +24.83 1 +24.84 3 +24.86 1 +24.87 5 +24.89 1 +24.90 2 +24.92 2 +24.94 2 +24.96 1 +24.97 2 +24.98 2 +24.99 1 +25.00 2 +25.04 1 +25.06 1 +25.08 1 +25.10 2 +25.12 1 +25.13 2 +25.14 1 +25.15 1 +25.17 2 +25.18 3 +25.19 2 +25.20 1 +25.21 2 +25.22 1 +25.23 1 +25.24 1 +25.25 2 +25.27 1 +25.28 1 +25.29 2 +25.31 4 +25.35 1 +25.36 2 +25.37 1 +25.38 2 +25.39 1 +25.40 1 +25.41 4 +25.42 1 +25.43 1 +25.47 1 +25.50 1 +25.51 2 +25.52 2 +25.53 1 +25.56 2 +25.57 2 +25.58 3 +25.62 2 +25.63 1 +25.64 1 +25.65 1 +25.66 1 +25.67 2 +25.68 1 +25.69 1 +25.70 1 +25.71 5 +25.72 1 +25.73 1 +25.74 1 +25.76 1 +25.77 2 +25.80 1 +25.83 2 +25.84 2 +25.86 1 +25.88 2 +25.89 2 +25.90 1 +25.91 1 +25.92 1 +25.94 2 +25.95 1 +25.97 1 +25.98 1 +25.99 1 +26.00 1 +26.01 3 +26.02 2 +26.03 1 +26.04 1 +26.05 1 +26.06 2 +26.09 1 +26.10 1 +26.11 2 +26.12 1 +26.13 1 +26.16 1 +26.17 1 +26.19 1 +26.20 2 +26.21 2 +26.22 2 +26.24 1 +26.27 1 +26.31 1 +26.32 3 +26.33 1 +26.34 1 +26.35 1 +26.36 1 +26.37 1 +26.40 2 +26.41 1 +26.42 1 +26.43 1 +26.44 1 +26.46 2 +26.54 1 +26.56 1 +26.57 2 +26.60 1 +26.62 2 +26.64 1 +26.65 1 +26.66 1 +26.67 1 +26.68 1 +26.70 1 +26.73 1 +26.74 2 +26.77 1 +26.78 2 +26.79 1 +26.80 3 +26.81 1 +26.83 1 +26.85 2 +26.86 1 +26.88 1 +26.89 1 +26.91 1 +26.93 1 +26.94 2 +26.95 1 +26.96 2 +26.97 1 +26.98 1 +26.99 2 +27.00 1 +27.01 3 +27.02 1 +27.03 1 +27.04 4 +27.05 1 +27.06 1 +27.07 3 +27.12 4 +27.13 2 +27.14 1 +27.18 1 +27.25 1 +27.26 1 +27.27 1 +27.32 1 +27.33 2 +27.34 2 +27.35 3 +27.36 1 +27.38 1 +27.39 2 +27.43 1 +27.44 1 +27.45 1 +27.47 1 +27.48 1 +27.49 1 +27.51 1 +27.52 2 +27.53 3 +27.55 1 +27.56 1 +27.57 4 +27.58 3 +27.59 2 +27.60 1 +27.62 2 +27.63 3 +27.64 1 +27.67 1 +27.68 2 +27.71 1 +27.72 2 +27.75 1 +27.76 3 +27.79 1 +27.80 3 +27.81 1 +27.82 1 +27.83 1 +27.84 1 +27.86 1 +27.87 1 +27.88 1 +27.89 3 +27.90 1 +27.92 1 +27.95 1 +27.96 2 +27.99 1 +28.00 1 +28.02 1 +28.03 2 +28.04 2 +28.05 2 +28.07 3 +28.08 2 +28.09 3 +28.10 1 +28.11 2 +28.14 2 +28.15 2 +28.17 2 +28.18 1 +28.20 2 +28.22 2 +28.23 1 +28.25 2 +28.28 2 +28.29 1 +28.30 1 +28.31 4 +28.33 3 +28.34 1 +28.35 2 +28.37 1 +28.40 2 +28.41 2 +28.42 1 +28.43 1 +28.44 1 +28.47 2 +28.48 1 +28.49 2 +28.51 1 +28.53 1 +28.54 2 +28.55 2 +28.60 1 +28.61 1 +28.62 2 +28.63 2 +28.64 1 +28.65 1 +28.67 1 +28.68 1 +28.69 2 +28.71 2 +28.72 1 +28.74 2 +28.76 2 +28.78 1 +28.80 1 +28.81 1 +28.84 1 +28.85 1 +28.87 1 +28.90 2 +28.91 2 +28.92 3 +28.93 3 +28.94 1 +28.95 1 +28.96 1 +28.98 1 +29.00 1 +29.01 2 +29.02 1 +29.03 1 +29.04 1 +29.06 2 +29.07 1 +29.09 1 +29.14 1 +29.15 1 +29.19 2 +29.21 1 +29.23 1 +29.26 1 +29.27 3 +29.28 1 +29.30 1 +29.31 1 +29.32 2 +29.34 1 +29.36 2 +29.37 1 +29.38 1 +29.39 1 +29.40 1 +29.41 1 +29.42 1 +29.43 1 +29.48 2 +29.49 4 +29.53 1 +29.54 3 +29.55 1 +29.57 3 +29.58 1 +29.59 3 +29.60 1 +29.61 1 +29.62 2 +29.63 1 +29.64 2 +29.65 2 +29.66 2 +29.67 1 +29.68 3 +29.70 1 +29.72 1 +29.73 1 +29.74 2 +29.75 3 +29.79 1 +29.83 1 +29.84 1 +29.85 1 +29.86 1 +29.87 1 +29.88 1 +29.89 1 +29.90 1 +29.92 2 +29.93 1 +29.95 1 +29.96 1 +29.97 1 +29.99 3 +3.00 2 +3.01 2 +3.04 3 +3.07 3 +3.08 2 +3.09 1 +3.11 2 +3.12 1 +3.13 3 +3.14 1 +3.16 2 +3.18 2 +3.22 2 +3.25 1 +3.27 2 +3.29 2 +3.31 1 +3.34 1 +3.36 3 +3.37 2 +3.39 2 +3.40 1 +3.41 1 +3.43 2 +3.44 1 +3.45 2 +3.46 3 +3.47 2 +3.48 1 +3.49 1 +3.51 4 +3.52 2 +3.53 1 +3.54 2 +3.55 1 +3.58 3 +3.59 2 +3.60 3 +3.61 2 +3.64 1 +3.65 2 +3.68 2 +3.69 2 +3.73 2 +3.74 1 +3.75 2 +3.78 1 +3.79 1 +3.81 3 +3.83 1 +3.84 3 +3.85 
1 +3.86 1 +3.87 4 +3.88 3 +3.90 1 +3.91 2 +3.93 3 +3.94 1 +3.95 2 +3.98 4 +30.00 2 +30.01 5 +30.02 2 +30.05 1 +30.06 2 +30.07 1 +30.08 2 +30.10 1 +30.11 2 +30.15 1 +30.16 1 +30.17 1 +30.18 1 +30.20 2 +30.21 1 +30.23 3 +30.24 1 +30.25 3 +30.26 1 +30.27 3 +30.30 1 +30.32 1 +30.33 2 +30.34 1 +30.35 2 +30.37 3 +30.38 1 +30.40 1 +30.41 1 +30.42 2 +30.43 2 +30.45 2 +30.47 1 +30.49 3 +30.54 1 +30.55 2 +30.57 3 +30.58 2 +30.59 3 +30.60 3 +30.65 2 +30.66 1 +30.67 3 +30.68 2 +30.70 2 +30.71 1 +30.72 1 +30.74 1 +30.76 1 +30.77 3 +30.78 1 +30.79 1 +30.80 3 +30.81 3 +30.82 1 +30.83 1 +30.84 2 +30.85 1 +30.86 1 +30.89 1 +30.91 1 +30.93 1 +30.94 3 +30.96 3 +30.97 1 +30.98 1 +30.99 1 +31.01 1 +31.02 2 +31.03 3 +31.04 1 +31.06 1 +31.07 2 +31.09 1 +31.10 2 +31.11 1 +31.12 1 +31.13 1 +31.16 1 +31.17 2 +31.18 1 +31.19 1 +31.20 1 +31.22 3 +31.23 1 +31.24 2 +31.25 1 +31.27 4 +31.28 1 +31.29 4 +31.30 1 +31.31 1 +31.32 1 +31.34 2 +31.35 3 +31.36 1 +31.37 1 +31.41 2 +31.42 3 +31.43 2 +31.44 2 +31.46 1 +31.48 2 +31.49 3 +31.50 2 +31.54 1 +31.56 3 +31.58 2 +31.59 1 +31.60 3 +31.63 1 +31.66 1 +31.68 2 +31.70 4 +31.71 2 +31.72 1 +31.73 1 +31.74 1 +31.75 2 +31.76 2 +31.78 1 +31.79 2 +31.83 3 +31.85 1 +31.87 2 +31.91 1 +31.92 1 +31.93 2 +31.94 1 +31.96 2 +31.98 1 +31.99 1 +32.01 2 +32.02 2 +32.03 2 +32.05 1 +32.06 2 +32.07 1 +32.08 2 +32.09 2 +32.10 3 +32.11 1 +32.12 2 +32.13 1 +32.15 3 +32.16 1 +32.17 1 +32.18 1 +32.20 1 +32.22 1 +32.25 1 +32.27 1 +32.28 1 +32.30 1 +32.31 1 +32.32 1 +32.33 1 +32.36 1 +32.37 1 +32.40 1 +32.41 3 +32.43 1 +32.44 1 +32.45 3 +32.47 1 +32.48 1 +32.52 1 +32.55 2 +32.56 2 +32.57 1 +32.60 1 +32.61 1 +32.63 1 +32.64 1 +32.65 2 +32.66 1 +32.68 2 +32.72 1 +32.73 2 +32.78 3 +32.79 1 +32.80 3 +32.83 1 +32.84 1 +32.85 2 +32.86 1 +32.88 1 +32.89 1 +32.92 1 +32.94 1 +32.95 2 +32.96 3 +32.97 2 +32.99 1 +33.00 3 +33.01 1 +33.03 5 +33.04 3 +33.05 1 +33.06 1 +33.07 3 +33.11 2 +33.12 2 +33.13 1 +33.14 1 +33.15 1 +33.17 1 +33.18 2 +33.20 2 +33.21 1 +33.22 3 +33.24 1 +33.25 1 +33.27 3 +33.29 3 +33.30 1 +33.31 1 +33.33 1 +33.35 2 +33.36 2 +33.37 3 +33.38 1 +33.39 1 +33.40 1 +33.41 2 +33.42 1 +33.45 2 +33.47 2 +33.49 1 +33.50 2 +33.51 2 +33.53 1 +33.55 1 +33.58 1 +33.59 1 +33.63 2 +33.64 1 +33.65 1 +33.66 2 +33.67 1 +33.68 1 +33.69 3 +33.70 1 +33.71 1 +33.72 3 +33.73 2 +33.75 1 +33.76 3 +33.77 4 +33.82 1 +33.83 1 +33.84 1 +33.87 1 +33.88 2 +33.89 2 +33.90 2 +33.92 2 +33.95 2 +33.99 1 +34.01 2 +34.02 1 +34.04 1 +34.07 1 +34.08 1 +34.10 1 +34.11 1 +34.12 1 +34.13 1 +34.14 1 +34.20 1 +34.22 4 +34.23 2 +34.27 2 +34.29 1 +34.30 1 +34.31 1 +34.32 1 +34.34 1 +34.37 1 +34.38 1 +34.40 2 +34.42 3 +34.44 1 +34.45 2 +34.47 1 +34.49 1 +34.50 1 +34.54 2 +34.55 2 +34.57 1 +34.58 2 +34.59 2 +34.61 2 +34.63 2 +34.64 1 +34.65 2 +34.66 1 +34.67 1 +34.70 2 +34.71 2 +34.72 2 +34.75 1 +34.77 1 +34.78 2 +34.79 1 +34.80 3 +34.82 2 +34.83 1 +34.84 2 +34.85 2 +34.87 4 +34.88 3 +34.90 2 +34.91 1 +34.94 1 +34.95 1 +34.96 1 +34.98 1 +35.01 1 +35.03 1 +35.05 1 +35.06 1 +35.07 1 +35.08 1 +35.09 1 +35.10 2 +35.11 1 +35.13 2 +35.14 4 +35.15 2 +35.16 1 +35.17 1 +35.19 1 +35.20 1 +35.21 3 +35.23 1 +35.25 1 +35.26 1 +35.27 1 +35.28 1 +35.29 1 +35.32 2 +35.33 1 +35.36 1 +35.37 1 +35.38 2 +35.40 3 +35.42 1 +35.43 1 +35.45 3 +35.46 2 +35.48 2 +35.51 1 +35.55 1 +35.56 1 +35.58 1 +35.59 1 +35.63 1 +35.65 1 +35.66 1 +35.68 2 +35.70 1 +35.73 2 +35.75 2 +35.76 1 +35.77 1 +35.78 2 +35.79 1 +35.80 2 +35.82 1 +35.83 2 +35.84 1 +35.85 1 +35.86 2 +35.89 1 +35.90 2 +35.93 1 +35.94 2 +35.95 1 +35.96 1 +35.97 2 +35.98 1 +36.04 2 +36.05 1 +36.06 1 +36.10 1 +36.11 
3 +36.13 1 +36.17 1 +36.18 2 +36.21 1 +36.22 2 +36.23 1 +36.24 1 +36.25 2 +36.28 1 +36.30 1 +36.31 1 +36.32 2 +36.33 4 +36.34 2 +36.36 2 +36.37 1 +36.38 5 +36.41 4 +36.42 1 +36.43 3 +36.45 1 +36.48 1 +36.49 1 +36.50 2 +36.51 2 +36.52 1 +36.53 1 +36.54 2 +36.56 1 +36.57 1 +36.58 2 +36.59 1 +36.60 1 +36.61 2 +36.63 1 +36.65 1 +36.66 2 +36.68 1 +36.69 1 +36.73 2 +36.74 2 +36.75 3 +36.76 2 +36.77 6 +36.80 1 +36.81 1 +36.82 1 +36.84 1 +36.85 2 +36.86 1 +36.87 1 +36.88 1 +36.89 1 +36.90 1 +36.91 1 +36.92 2 +36.93 2 +36.95 3 +36.96 3 +36.97 1 +36.98 1 +36.99 1 +37.00 1 +37.01 1 +37.02 2 +37.03 1 +37.05 2 +37.06 3 +37.07 3 +37.10 1 +37.11 1 +37.12 2 +37.15 2 +37.16 1 +37.17 1 +37.18 1 +37.22 2 +37.24 1 +37.25 2 +37.27 1 +37.28 1 +37.30 1 +37.31 1 +37.32 1 +37.34 4 +37.35 2 +37.36 1 +37.38 1 +37.39 2 +37.41 2 +37.42 1 +37.43 2 +37.44 1 +37.45 2 +37.46 1 +37.48 3 +37.53 1 +37.55 1 +37.56 1 +37.60 5 +37.61 1 +37.63 2 +37.64 1 +37.65 1 +37.66 1 +37.67 1 +37.69 1 +37.70 1 +37.71 2 +37.73 1 +37.74 3 +37.76 1 +37.78 2 +37.79 1 +37.80 2 +37.82 2 +37.84 3 +37.85 1 +37.86 3 +37.88 1 +37.91 1 +37.93 1 +37.94 1 +37.95 3 +37.97 2 +37.98 1 +37.99 1 +38.00 1 +38.01 2 +38.02 1 +38.03 2 +38.04 2 +38.05 1 +38.08 3 +38.09 1 +38.11 1 +38.13 1 +38.14 2 +38.15 1 +38.16 1 +38.17 1 +38.19 1 +38.20 1 +38.22 2 +38.23 3 +38.24 1 +38.25 2 +38.26 1 +38.27 2 +38.29 1 +38.30 1 +38.31 1 +38.33 1 +38.34 2 +38.35 1 +38.36 1 +38.37 1 +38.39 1 +38.41 1 +38.42 1 +38.43 1 +38.44 1 +38.45 1 +38.46 1 +38.47 1 +38.48 1 +38.49 1 +38.50 4 +38.54 3 +38.55 2 +38.56 1 +38.58 1 +38.60 3 +38.61 3 +38.64 1 +38.65 1 +38.66 1 +38.67 2 +38.70 3 +38.71 2 +38.73 2 +38.74 1 +38.75 2 +38.76 1 +38.77 1 +38.81 1 +38.82 2 +38.83 1 +38.84 2 +38.88 3 +38.91 2 +38.93 2 +38.98 4 +39.04 2 +39.05 2 +39.06 1 +39.07 1 +39.09 1 +39.12 5 +39.14 1 +39.15 1 +39.16 2 +39.18 1 +39.19 1 +39.20 2 +39.21 1 +39.22 1 +39.25 1 +39.27 2 +39.31 1 +39.32 2 +39.36 1 +39.37 3 +39.38 1 +39.40 2 +39.42 4 +39.43 1 +39.47 2 +39.48 2 +39.51 1 +39.52 2 +39.55 1 +39.56 2 +39.60 2 +39.61 4 +39.62 1 +39.63 1 +39.67 1 +39.69 2 +39.70 2 +39.71 1 +39.74 2 +39.75 1 +39.76 1 +39.77 2 +39.80 1 +39.81 2 +39.82 2 +39.84 1 +39.85 1 +39.86 2 +39.88 1 +39.89 2 +39.90 3 +39.91 1 +39.92 1 +39.94 2 +39.95 1 +39.96 1 +39.97 1 +39.98 2 +4.03 1 +4.05 1 +4.07 1 +4.08 1 +4.09 1 +4.11 1 +4.14 2 +4.16 2 +4.17 1 +4.18 1 +4.19 1 +4.21 2 +4.22 1 +4.23 1 +4.24 1 +4.25 1 +4.26 1 +4.28 2 +4.29 1 +4.30 2 +4.31 2 +4.32 1 +4.33 2 +4.34 1 +4.35 2 +4.36 2 +4.39 1 +4.40 1 +4.41 1 +4.43 1 +4.44 2 +4.45 1 +4.47 2 +4.49 2 +4.50 1 +4.51 1 +4.52 1 +4.54 1 +4.55 3 +4.56 1 +4.57 1 +4.58 2 +4.59 1 +4.60 5 +4.62 3 +4.64 2 +4.67 1 +4.68 1 +4.70 3 +4.73 1 +4.74 1 +4.77 1 +4.78 2 +4.79 1 +4.82 1 +4.85 1 +4.87 1 +4.88 2 +4.89 3 +4.90 1 +4.91 1 +4.93 1 +4.94 1 +4.95 1 +4.97 2 +40.04 1 +40.06 2 +40.07 2 +40.10 1 +40.11 2 +40.12 3 +40.14 1 +40.18 2 +40.21 1 +40.22 1 +40.28 1 +40.29 1 +40.30 1 +40.33 2 +40.35 1 +40.39 4 +40.40 1 +40.41 1 +40.42 1 +40.43 1 +40.48 1 +40.50 3 +40.51 1 +40.52 1 +40.54 2 +40.56 1 +40.57 2 +40.58 3 +40.59 1 +40.60 2 +40.62 2 +40.63 3 +40.64 1 +40.65 2 +40.66 1 +40.67 1 +40.68 3 +40.70 1 +40.73 1 +40.74 1 +40.76 1 +40.79 1 +40.81 2 +40.82 1 +40.84 1 +40.87 1 +40.88 1 +40.90 1 +40.91 1 +40.92 1 +40.93 1 +40.94 1 +40.96 1 +40.97 1 +40.99 2 +41.00 1 +41.01 2 +41.02 2 +41.04 1 +41.05 2 +41.06 1 +41.08 2 +41.10 1 +41.11 2 +41.13 3 +41.14 1 +41.16 2 +41.19 2 +41.21 1 +41.26 1 +41.27 2 +41.28 1 +41.30 2 +41.32 1 +41.33 1 +41.35 1 +41.37 2 +41.38 1 +41.39 2 +41.40 1 +41.41 1 +41.43 1 +41.44 2 +41.45 1 +41.46 1 +41.47 1 
+41.51 2 +41.52 2 +41.53 1 +41.54 2 +41.55 2 +41.56 2 +41.57 2 +41.58 2 +41.59 1 +41.60 1 +41.61 3 +41.62 1 +41.63 1 +41.67 2 +41.68 3 +41.69 2 +41.70 2 +41.71 2 +41.72 1 +41.75 1 +41.76 1 +41.78 1 +41.80 1 +41.81 2 +41.83 1 +41.86 1 +41.87 3 +41.88 2 +41.89 1 +41.90 2 +41.91 1 +41.92 1 +41.93 1 +41.96 1 +41.97 1 +42.00 1 +42.01 2 +42.02 3 +42.04 1 +42.06 1 +42.09 4 +42.11 1 +42.13 2 +42.14 1 +42.17 1 +42.19 1 +42.20 1 +42.22 2 +42.25 1 +42.26 2 +42.28 2 +42.29 1 +42.30 2 +42.33 1 +42.34 1 +42.35 1 +42.36 2 +42.38 1 +42.39 2 +42.40 2 +42.42 1 +42.43 1 +42.44 1 +42.47 1 +42.48 2 +42.52 2 +42.53 1 +42.54 2 +42.57 1 +42.58 2 +42.60 2 +42.62 2 +42.64 1 +42.66 2 +42.67 1 +42.68 3 +42.70 1 +42.71 1 +42.74 5 +42.76 2 +42.78 2 +42.79 1 +42.81 1 +42.82 1 +42.84 1 +42.85 5 +42.86 1 +42.87 1 +42.88 3 +42.89 1 +42.90 1 +42.91 1 +42.93 4 +42.94 2 +42.98 1 +42.99 1 +43.00 1 +43.01 1 +43.03 1 +43.05 1 +43.08 1 +43.09 1 +43.10 1 +43.11 2 +43.13 2 +43.16 1 +43.18 3 +43.25 2 +43.26 5 +43.29 1 +43.31 2 +43.32 1 +43.33 2 +43.37 3 +43.39 1 +43.41 2 +43.42 1 +43.43 1 +43.44 1 +43.45 1 +43.47 1 +43.48 1 +43.49 1 +43.51 1 +43.53 1 +43.55 3 +43.56 2 +43.58 1 +43.59 1 +43.60 1 +43.61 1 +43.62 1 +43.65 2 +43.66 3 +43.67 1 +43.71 1 +43.72 2 +43.73 1 +43.74 1 +43.75 1 +43.77 1 +43.78 2 +43.79 1 +43.80 4 +43.81 3 +43.82 1 +43.84 2 +43.85 1 +43.87 1 +43.88 1 +43.89 1 +43.92 2 +43.93 1 +43.94 1 +43.95 1 +44.00 1 +44.02 1 +44.04 1 +44.09 2 +44.10 2 +44.17 1 +44.18 1 +44.19 1 +44.20 2 +44.21 4 +44.22 1 +44.24 1 +44.25 1 +44.26 2 +44.30 2 +44.31 1 +44.32 4 +44.33 2 +44.35 1 +44.36 2 +44.37 1 +44.38 1 +44.39 1 +44.41 2 +44.44 1 +44.47 2 +44.49 1 +44.50 1 +44.51 2 +44.52 1 +44.53 2 +44.56 1 +44.57 1 +44.58 3 +44.59 2 +44.60 2 +44.61 2 +44.65 2 +44.66 2 +44.67 2 +44.68 1 +44.69 1 +44.72 1 +44.74 1 +44.75 3 +44.76 1 +44.77 1 +44.78 1 +44.80 1 +44.81 2 +44.83 1 +44.84 1 +44.85 2 +44.88 2 +44.90 1 +44.91 2 +44.92 1 +44.93 1 +44.94 4 +44.95 1 +44.96 5 +44.98 1 +45.00 1 +45.02 1 +45.03 1 +45.05 1 +45.06 1 +45.08 2 +45.12 1 +45.13 1 +45.15 1 +45.19 1 +45.20 1 +45.21 2 +45.23 1 +45.24 1 +45.25 2 +45.29 4 +45.30 1 +45.31 1 +45.32 1 +45.33 2 +45.34 1 +45.35 1 +45.36 3 +45.38 3 +45.40 2 +45.41 1 +45.43 1 +45.45 3 +45.46 1 +45.47 1 +45.48 1 +45.49 1 +45.54 1 +45.58 2 +45.60 1 +45.61 1 +45.63 2 +45.64 2 +45.65 1 +45.67 2 +45.69 4 +45.70 2 +45.74 1 +45.77 2 +45.78 1 +45.81 2 +45.82 1 +45.83 2 +45.84 1 +45.86 4 +45.89 3 +45.90 1 +45.92 1 +45.94 1 +45.96 2 +45.97 1 +45.98 3 +45.99 1 +46.00 1 +46.01 1 +46.04 1 +46.05 1 +46.06 1 +46.09 3 +46.11 1 +46.12 1 +46.13 1 +46.14 3 +46.15 1 +46.19 2 +46.20 1 +46.21 1 +46.23 2 +46.24 1 +46.25 1 +46.26 2 +46.29 2 +46.30 1 +46.32 2 +46.33 1 +46.34 2 +46.35 1 +46.36 3 +46.37 1 +46.39 2 +46.40 1 +46.41 1 +46.42 2 +46.43 1 +46.45 1 +46.47 2 +46.48 2 +46.50 1 +46.53 1 +46.54 1 +46.55 1 +46.57 1 +46.58 1 +46.60 1 +46.61 1 +46.62 1 +46.66 1 +46.70 1 +46.71 1 +46.72 2 +46.73 1 +46.74 1 +46.75 1 +46.77 1 +46.78 2 +46.81 1 +46.84 2 +46.86 1 +46.87 2 +46.90 1 +46.91 1 +46.92 2 +46.93 4 +46.94 1 +46.95 1 +46.96 1 +46.97 1 +46.98 2 +46.99 3 +47.02 1 +47.03 2 +47.06 1 +47.07 1 +47.08 1 +47.09 3 +47.11 1 +47.12 3 +47.13 1 +47.16 3 +47.17 1 +47.19 1 +47.23 3 +47.25 3 +47.26 2 +47.29 1 +47.30 1 +47.31 1 +47.32 1 +47.34 1 +47.35 3 +47.37 1 +47.38 1 +47.41 2 +47.42 1 +47.45 5 +47.46 2 +47.48 1 +47.49 2 +47.51 2 +47.52 1 +47.53 1 +47.54 3 +47.55 1 +47.56 1 +47.57 2 +47.60 1 +47.61 2 +47.62 1 +47.63 1 +47.64 1 +47.65 1 +47.66 1 +47.68 1 +47.69 1 +47.70 1 +47.71 1 +47.72 1 +47.75 1 +47.77 3 +47.79 1 +47.81 2 +47.82 1 +47.83 1 
+47.85 1 +47.86 1 +47.88 1 +47.89 1 +47.91 5 +47.94 1 +47.96 1 +47.97 3 +47.99 2 +48.02 5 +48.05 1 +48.06 1 +48.07 2 +48.08 2 +48.11 1 +48.12 1 +48.13 3 +48.14 1 +48.15 1 +48.16 1 +48.17 3 +48.18 1 +48.19 1 +48.20 4 +48.21 3 +48.22 1 +48.24 1 +48.25 1 +48.26 1 +48.27 2 +48.28 1 +48.29 2 +48.31 1 +48.32 1 +48.33 1 +48.34 2 +48.36 1 +48.37 2 +48.38 1 +48.41 1 +48.42 1 +48.43 2 +48.48 3 +48.49 2 +48.50 3 +48.51 3 +48.54 3 +48.55 1 +48.59 1 +48.60 1 +48.61 2 +48.63 1 +48.64 1 +48.68 3 +48.69 2 +48.70 1 +48.71 2 +48.73 2 +48.75 1 +48.76 2 +48.77 3 +48.78 1 +48.79 2 +48.80 3 +48.81 3 +48.83 1 +48.84 4 +48.85 1 +48.86 1 +48.88 2 +48.90 3 +48.91 1 +48.92 1 +48.93 1 +48.94 2 +48.95 1 +48.96 2 +48.97 1 +48.98 1 +49.00 1 +49.01 2 +49.02 2 +49.03 1 +49.05 1 +49.06 1 +49.07 1 +49.10 2 +49.12 3 +49.13 1 +49.14 1 +49.15 1 +49.17 1 +49.18 2 +49.19 3 +49.21 1 +49.23 1 +49.24 1 +49.26 3 +49.28 1 +49.30 1 +49.31 2 +49.34 1 +49.35 1 +49.38 2 +49.39 1 +49.40 1 +49.43 2 +49.44 1 +49.46 1 +49.49 2 +49.50 1 +49.51 1 +49.54 3 +49.55 1 +49.57 1 +49.60 3 +49.62 2 +49.65 1 +49.67 3 +49.69 2 +49.70 3 +49.71 2 +49.72 2 +49.73 4 +49.75 2 +49.78 1 +49.79 1 +49.80 2 +49.81 2 +49.82 2 +49.83 2 +49.84 2 +49.85 1 +49.87 1 +49.88 4 +49.89 2 +49.90 1 +49.91 3 +49.92 2 +49.93 2 +49.95 1 +49.97 1 +49.99 2 +5.00 2 +5.01 1 +5.02 1 +5.03 1 +5.04 1 +5.05 4 +5.06 1 +5.07 2 +5.09 2 +5.10 3 +5.12 1 +5.13 1 +5.14 2 +5.15 1 +5.16 1 +5.18 1 +5.19 1 +5.20 1 +5.21 2 +5.24 3 +5.26 1 +5.28 1 +5.31 2 +5.33 1 +5.35 1 +5.37 2 +5.39 1 +5.42 2 +5.43 1 +5.45 1 +5.46 4 +5.47 1 +5.49 3 +5.50 3 +5.51 1 +5.52 1 +5.53 1 +5.58 1 +5.63 1 +5.64 2 +5.65 2 +5.68 2 +5.69 1 +5.71 1 +5.72 2 +5.73 1 +5.75 1 +5.76 2 +5.81 1 +5.82 1 +5.83 3 +5.84 1 +5.86 2 +5.88 2 +5.89 1 +5.90 1 +5.93 3 +5.96 1 +5.97 1 +5.98 1 +5.99 1 +50.00 2 +50.01 2 +50.02 1 +50.03 2 +50.04 2 +50.06 1 +50.08 1 +50.09 1 +50.10 1 +50.13 2 +50.14 1 +50.15 1 +50.17 2 +50.21 1 +50.22 3 +50.25 2 +50.26 4 +50.29 1 +50.30 2 +50.31 2 +50.32 1 +50.33 1 +50.34 1 +50.38 1 +50.39 1 +50.40 1 +50.42 1 +50.45 2 +50.50 2 +50.51 1 +50.53 1 +50.54 1 +50.55 1 +50.56 1 +50.57 3 +50.59 2 +50.60 5 +50.61 3 +50.62 1 +50.63 1 +50.66 1 +50.67 1 +50.71 1 +50.72 1 +50.73 3 +50.75 1 +50.76 1 +50.78 2 +50.79 2 +50.80 2 +50.83 1 +50.84 2 +50.85 1 +50.86 2 +50.90 1 +50.96 1 +50.97 2 +50.98 1 +50.99 1 +51.01 1 +51.02 1 +51.04 2 +51.08 4 +51.09 2 +51.10 1 +51.12 1 +51.13 3 +51.14 3 +51.15 1 +51.18 1 +51.19 1 +51.21 1 +51.29 3 +51.32 1 +51.33 2 +51.36 2 +51.39 1 +51.40 2 +51.43 3 +51.45 1 +51.48 2 +51.52 1 +51.53 1 +51.54 3 +51.55 2 +51.56 2 +51.58 3 +51.59 1 +51.62 1 +51.64 1 +51.66 1 +51.68 4 +51.69 1 +51.70 1 +51.71 1 +51.74 2 +51.76 2 +51.78 1 +51.79 1 +51.82 1 +51.83 1 +51.84 2 +51.85 1 +51.86 1 +51.88 2 +51.89 2 +51.90 3 +51.91 3 +51.94 1 +51.95 2 +52.01 1 +52.02 2 +52.05 2 +52.08 3 +52.10 1 +52.12 1 +52.13 1 +52.17 1 +52.19 1 +52.20 1 +52.23 2 +52.24 2 +52.28 2 +52.29 1 +52.32 1 +52.33 1 +52.36 1 +52.38 2 +52.41 3 +52.42 1 +52.43 1 +52.45 1 +52.48 1 +52.49 1 +52.50 1 +52.51 2 +52.52 2 +52.53 1 +52.55 1 +52.56 2 +52.58 1 +52.59 2 +52.60 2 +52.61 1 +52.62 1 +52.63 1 +52.64 1 +52.65 2 +52.66 1 +52.67 1 +52.68 1 +52.69 2 +52.70 3 +52.72 1 +52.74 1 +52.75 1 +52.77 1 +52.78 2 +52.79 1 +52.80 3 +52.81 2 +52.83 2 +52.84 2 +52.85 1 +52.88 1 +52.89 2 +52.90 2 +52.92 1 +52.94 1 +52.95 2 +52.96 1 +52.97 1 +52.98 1 +53.02 1 +53.04 2 +53.06 1 +53.07 2 +53.08 2 +53.09 1 +53.10 2 +53.13 1 +53.14 3 +53.16 2 +53.21 1 +53.22 1 +53.24 1 +53.25 1 +53.27 2 +53.28 2 +53.29 1 +53.30 1 +53.31 3 +53.34 1 +53.35 2 +53.36 1 +53.37 3 +53.38 2 +53.39 
2 +53.40 1 +53.41 3 +53.46 1 +53.47 1 +53.48 2 +53.49 1 +53.50 2 +53.51 1 +53.52 2 +53.53 3 +53.55 1 +53.56 2 +53.57 1 +53.58 1 +53.59 1 +53.60 1 +53.61 4 +53.63 1 +53.64 2 +53.65 4 +53.68 1 +53.69 1 +53.72 2 +53.73 1 +53.74 1 +53.75 1 +53.77 1 +53.79 1 +53.80 1 +53.82 1 +53.83 1 +53.84 2 +53.85 2 +53.86 1 +53.89 1 +53.91 2 +53.92 3 +53.93 1 +53.94 4 +53.96 1 +53.97 1 +53.98 2 +54.00 1 +54.01 1 +54.02 3 +54.03 3 +54.04 4 +54.05 2 +54.07 1 +54.09 1 +54.10 2 +54.11 1 +54.15 1 +54.16 2 +54.18 1 +54.19 1 +54.20 2 +54.21 1 +54.23 1 +54.25 1 +54.26 2 +54.27 1 +54.28 1 +54.29 2 +54.30 1 +54.31 2 +54.32 2 +54.33 1 +54.34 1 +54.35 2 +54.37 1 +54.39 1 +54.41 1 +54.42 1 +54.43 2 +54.45 2 +54.46 1 +54.47 2 +54.48 1 +54.49 1 +54.50 1 +54.51 1 +54.53 2 +54.54 1 +54.55 1 +54.56 1 +54.57 1 +54.58 1 +54.61 1 +54.62 2 +54.64 3 +54.65 1 +54.66 3 +54.67 2 +54.68 2 +54.69 2 +54.70 1 +54.72 3 +54.73 1 +54.74 1 +54.75 5 +54.76 3 +54.79 1 +54.80 2 +54.82 2 +54.85 1 +54.86 1 +54.88 1 +54.89 1 +54.90 1 +54.92 1 +54.93 1 +54.94 1 +54.96 1 +54.98 3 +55.00 2 +55.02 1 +55.03 1 +55.04 3 +55.05 1 +55.06 1 +55.07 1 +55.09 1 +55.10 1 +55.12 3 +55.13 1 +55.14 1 +55.16 1 +55.18 1 +55.19 1 +55.22 1 +55.23 2 +55.24 1 +55.25 1 +55.26 1 +55.30 2 +55.31 1 +55.33 1 +55.36 1 +55.37 2 +55.38 2 +55.39 3 +55.40 2 +55.41 4 +55.42 1 +55.43 1 +55.44 1 +55.45 3 +55.47 2 +55.49 2 +55.50 1 +55.56 1 +55.59 1 +55.60 1 +55.61 1 +55.64 1 +55.66 1 +55.68 3 +55.69 1 +55.70 2 +55.71 1 +55.73 1 +55.75 2 +55.76 3 +55.77 1 +55.78 1 +55.80 1 +55.81 2 +55.82 2 +55.83 2 +55.84 1 +55.86 1 +55.87 1 +55.89 2 +55.90 4 +55.92 1 +55.93 1 +55.94 4 +55.96 1 +55.97 2 +56.00 1 +56.01 1 +56.02 2 +56.04 3 +56.05 1 +56.06 2 +56.07 2 +56.08 2 +56.10 1 +56.11 1 +56.12 1 +56.13 1 +56.15 2 +56.16 1 +56.18 1 +56.20 1 +56.21 1 +56.22 1 +56.23 1 +56.27 1 +56.28 1 +56.32 2 +56.33 3 +56.34 1 +56.36 2 +56.37 1 +56.38 1 +56.41 1 +56.43 1 +56.44 1 +56.45 2 +56.48 1 +56.49 1 +56.52 1 +56.53 2 +56.54 1 +56.55 1 +56.56 2 +56.57 1 +56.58 1 +56.59 3 +56.60 2 +56.63 2 +56.64 2 +56.65 4 +56.70 3 +56.72 1 +56.73 2 +56.74 4 +56.79 1 +56.82 2 +56.83 4 +56.84 1 +56.85 2 +56.86 2 +56.90 2 +56.97 1 +56.98 2 +56.99 1 +57.01 1 +57.03 2 +57.07 1 +57.09 2 +57.10 3 +57.11 1 +57.12 2 +57.15 1 +57.17 2 +57.19 1 +57.20 1 +57.21 3 +57.22 2 +57.23 1 +57.25 2 +57.26 1 +57.28 1 +57.29 2 +57.30 1 +57.32 1 +57.33 4 +57.34 2 +57.36 1 +57.37 1 +57.38 2 +57.39 2 +57.40 1 +57.41 1 +57.42 1 +57.46 1 +57.47 1 +57.48 1 +57.49 2 +57.50 1 +57.53 1 +57.55 2 +57.56 1 +57.58 5 +57.59 1 +57.60 1 +57.61 3 +57.63 1 +57.64 1 +57.65 2 +57.66 2 +57.67 3 +57.68 2 +57.69 3 +57.71 2 +57.73 1 +57.74 1 +57.79 2 +57.80 1 +57.82 2 +57.83 1 +57.86 2 +57.87 2 +57.88 2 +57.89 2 +57.90 1 +57.91 2 +57.93 1 +57.94 1 +57.95 1 +57.97 1 +57.99 3 +58.01 1 +58.02 1 +58.03 3 +58.04 1 +58.05 5 +58.06 2 +58.07 1 +58.08 1 +58.10 2 +58.12 1 +58.14 1 +58.15 2 +58.19 1 +58.20 1 +58.21 1 +58.23 3 +58.24 1 +58.25 3 +58.27 2 +58.28 2 +58.29 1 +58.30 1 +58.31 1 +58.34 1 +58.38 1 +58.39 2 +58.40 2 +58.41 3 +58.42 1 +58.44 1 +58.45 1 +58.46 1 +58.47 1 +58.50 1 +58.51 1 +58.52 1 +58.54 3 +58.56 1 +58.58 1 +58.59 1 +58.60 1 +58.62 1 +58.63 1 +58.64 2 +58.65 1 +58.66 1 +58.67 2 +58.68 2 +58.69 1 +58.70 1 +58.71 1 +58.75 2 +58.78 2 +58.79 4 +58.80 4 +58.81 3 +58.82 4 +58.83 3 +58.84 2 +58.85 3 +58.86 2 +58.87 1 +58.88 1 +58.89 1 +58.90 4 +58.92 2 +58.94 1 +58.96 2 +58.97 2 +58.99 2 +59.00 1 +59.01 1 +59.02 2 +59.03 3 +59.06 1 +59.08 1 +59.10 2 +59.12 2 +59.13 2 +59.16 1 +59.17 2 +59.18 1 +59.19 1 +59.21 1 +59.24 1 +59.26 1 +59.28 2 +59.29 1 +59.30 1 +59.32 
2 +59.33 1 +59.35 2 +59.36 1 +59.37 1 +59.38 1 +59.40 2 +59.41 2 +59.42 2 +59.43 2 +59.44 2 +59.46 2 +59.47 2 +59.48 1 +59.49 5 +59.51 2 +59.52 1 +59.53 2 +59.54 1 +59.55 2 +59.56 3 +59.57 1 +59.58 1 +59.59 1 +59.62 1 +59.63 1 +59.64 2 +59.67 1 +59.68 2 +59.69 2 +59.70 1 +59.71 2 +59.72 1 +59.74 1 +59.76 1 +59.77 3 +59.78 1 +59.79 1 +59.82 1 +59.83 1 +59.89 1 +59.90 1 +59.91 1 +59.92 2 +59.93 2 +59.94 1 +59.95 2 +59.96 1 +59.99 1 +6.00 2 +6.02 2 +6.03 1 +6.04 1 +6.06 2 +6.09 1 +6.10 1 +6.11 1 +6.14 1 +6.15 1 +6.16 1 +6.17 1 +6.18 1 +6.19 2 +6.21 2 +6.22 1 +6.24 2 +6.25 1 +6.26 1 +6.29 2 +6.30 1 +6.35 2 +6.37 1 +6.39 2 +6.40 1 +6.41 1 +6.42 2 +6.43 1 +6.47 1 +6.48 1 +6.50 1 +6.54 2 +6.57 1 +6.59 1 +6.60 1 +6.61 4 +6.62 1 +6.63 1 +6.64 3 +6.66 1 +6.71 1 +6.74 1 +6.76 2 +6.78 2 +6.79 2 +6.81 1 +6.82 2 +6.83 2 +6.84 1 +6.85 2 +6.87 1 +6.88 1 +6.89 2 +6.90 1 +6.92 1 +6.93 1 +6.99 1 +60.00 2 +60.01 3 +60.02 1 +60.04 2 +60.05 3 +60.13 1 +60.16 1 +60.17 2 +60.18 3 +60.20 2 +60.22 1 +60.23 1 +60.24 2 +60.25 2 +60.26 1 +60.29 3 +60.30 1 +60.32 1 +60.35 2 +60.36 1 +60.37 2 +60.38 1 +60.39 1 +60.41 1 +60.42 1 +60.45 1 +60.46 3 +60.48 1 +60.51 1 +60.52 1 +60.53 1 +60.55 1 +60.56 2 +60.57 2 +60.58 1 +60.59 1 +60.60 1 +60.62 1 +60.63 1 +60.64 1 +60.66 2 +60.67 2 +60.70 1 +60.75 2 +60.77 1 +60.78 2 +60.80 2 +60.81 1 +60.82 1 +60.83 1 +60.85 3 +60.86 1 +60.87 3 +60.88 2 +60.89 1 +60.90 2 +60.92 1 +60.93 1 +60.94 2 +60.96 1 +60.97 1 +60.98 1 +60.99 1 +61.00 2 +61.02 1 +61.03 1 +61.04 1 +61.05 2 +61.06 1 +61.08 1 +61.11 4 +61.12 2 +61.13 2 +61.14 1 +61.15 2 +61.16 2 +61.18 1 +61.19 1 +61.20 1 +61.22 1 +61.23 1 +61.24 3 +61.26 1 +61.28 2 +61.29 2 +61.30 3 +61.31 2 +61.33 1 +61.34 3 +61.36 3 +61.38 2 +61.39 1 +61.41 2 +61.42 1 +61.43 1 +61.44 2 +61.46 1 +61.47 1 +61.50 4 +61.52 3 +61.53 1 +61.54 1 +61.55 1 +61.56 4 +61.57 2 +61.58 2 +61.59 2 +61.64 1 +61.67 1 +61.68 2 +61.69 1 +61.70 1 +61.77 2 +61.79 1 +61.81 2 +61.82 2 +61.83 1 +61.84 1 +61.85 2 +61.88 2 +61.89 3 +61.90 2 +61.93 1 +61.94 2 +61.96 2 +61.99 1 +62.00 2 +62.01 3 +62.02 1 +62.03 1 +62.04 2 +62.09 2 +62.11 3 +62.12 1 +62.14 1 +62.16 2 +62.17 1 +62.20 2 +62.21 1 +62.25 2 +62.26 2 +62.28 1 +62.31 2 +62.32 2 +62.33 1 +62.34 1 +62.35 4 +62.36 1 +62.37 1 +62.39 2 +62.41 1 +62.42 2 +62.43 2 +62.44 1 +62.45 1 +62.47 1 +62.48 2 +62.49 1 +62.51 2 +62.52 1 +62.53 1 +62.56 1 +62.58 1 +62.59 4 +62.60 1 +62.62 1 +62.65 2 +62.66 1 +62.67 1 +62.68 1 +62.70 3 +62.71 1 +62.72 1 +62.73 2 +62.77 1 +62.79 1 +62.80 2 +62.81 3 +62.82 1 +62.83 1 +62.84 4 +62.86 1 +62.89 3 +62.91 2 +62.92 1 +62.93 3 +62.95 2 +62.96 1 +62.98 1 +62.99 2 +63.00 1 +63.01 2 +63.02 1 +63.03 1 +63.05 2 +63.06 1 +63.07 1 +63.08 2 +63.10 1 +63.11 1 +63.12 1 +63.14 4 +63.15 1 +63.16 1 +63.17 1 +63.18 1 +63.21 2 +63.22 1 +63.23 2 +63.24 3 +63.25 2 +63.26 1 +63.27 2 +63.28 2 +63.29 2 +63.30 3 +63.31 1 +63.33 1 +63.34 1 +63.35 1 +63.36 3 +63.39 2 +63.40 1 +63.41 1 +63.43 1 +63.46 1 +63.47 1 +63.48 1 +63.49 3 +63.50 1 +63.51 2 +63.52 1 +63.54 2 +63.55 3 +63.57 2 +63.58 1 +63.59 1 +63.60 1 +63.61 1 +63.64 1 +63.65 3 +63.66 2 +63.70 1 +63.72 2 +63.73 1 +63.74 2 +63.75 1 +63.77 2 +63.78 3 +63.80 2 +63.81 2 +63.83 1 +63.85 1 +63.87 1 +63.90 2 +63.92 2 +63.93 3 +63.94 3 +63.99 3 +64.00 2 +64.01 2 +64.04 1 +64.05 2 +64.06 2 +64.09 2 +64.12 2 +64.14 1 +64.15 2 +64.16 1 +64.17 1 +64.18 2 +64.19 2 +64.20 2 +64.21 2 +64.22 3 +64.23 3 +64.24 2 +64.28 1 +64.29 2 +64.30 1 +64.32 1 +64.33 2 +64.34 1 +64.35 2 +64.36 2 +64.38 2 +64.39 2 +64.40 4 +64.41 2 +64.42 2 +64.43 1 +64.44 2 +64.45 1 +64.46 2 +64.49 2 +64.52 2 
+64.55 1 +64.60 1 +64.61 1 +64.62 1 +64.63 1 +64.65 3 +64.68 1 +64.69 1 +64.70 2 +64.71 3 +64.75 3 +64.76 1 +64.77 3 +64.79 1 +64.80 1 +64.81 2 +64.82 1 +64.84 1 +64.88 3 +64.89 1 +64.90 2 +64.91 3 +64.94 2 +64.95 3 +64.98 3 +64.99 2 +65.01 1 +65.02 1 +65.03 1 +65.05 1 +65.06 1 +65.07 1 +65.08 2 +65.13 2 +65.16 1 +65.17 2 +65.18 3 +65.20 1 +65.21 2 +65.22 1 +65.23 1 +65.24 2 +65.25 1 +65.26 1 +65.27 1 +65.28 2 +65.30 1 +65.31 1 +65.32 1 +65.33 2 +65.34 1 +65.35 1 +65.37 5 +65.40 1 +65.42 1 +65.43 1 +65.45 2 +65.46 2 +65.49 2 +65.50 3 +65.53 1 +65.57 1 +65.58 2 +65.61 1 +65.62 1 +65.66 2 +65.69 1 +65.72 1 +65.74 1 +65.75 1 +65.77 3 +65.78 2 +65.79 2 +65.81 2 +65.82 3 +65.85 5 +65.86 1 +65.87 2 +65.88 1 +65.89 1 +65.90 1 +65.91 1 +65.92 1 +65.93 1 +65.98 1 +65.99 1 +66.01 1 +66.02 1 +66.07 2 +66.08 1 +66.09 1 +66.10 1 +66.11 2 +66.13 1 +66.15 2 +66.16 2 +66.19 1 +66.22 1 +66.23 3 +66.24 2 +66.25 1 +66.27 2 +66.30 1 +66.32 2 +66.33 1 +66.34 3 +66.36 3 +66.37 1 +66.39 1 +66.41 1 +66.43 2 +66.45 1 +66.46 1 +66.47 1 +66.49 1 +66.52 1 +66.53 3 +66.54 1 +66.55 1 +66.56 1 +66.57 1 +66.58 2 +66.59 1 +66.60 1 +66.61 1 +66.62 2 +66.64 1 +66.67 1 +66.68 2 +66.71 1 +66.74 2 +66.76 2 +66.78 1 +66.79 1 +66.80 2 +66.81 1 +66.83 1 +66.84 1 +66.85 2 +66.88 1 +66.89 3 +66.91 3 +66.93 2 +66.94 1 +66.95 1 +66.98 2 +66.99 1 +67.00 1 +67.02 2 +67.03 1 +67.04 3 +67.05 1 +67.07 1 +67.08 1 +67.09 1 +67.10 1 +67.14 1 +67.16 1 +67.17 1 +67.18 3 +67.20 1 +67.22 2 +67.23 2 +67.24 1 +67.29 2 +67.30 1 +67.34 2 +67.37 1 +67.40 1 +67.41 1 +67.42 1 +67.46 1 +67.50 3 +67.52 1 +67.54 4 +67.55 1 +67.56 1 +67.59 1 +67.60 1 +67.65 1 +67.66 1 +67.69 1 +67.70 1 +67.71 1 +67.73 1 +67.74 2 +67.78 1 +67.80 2 +67.81 2 +67.82 2 +67.83 2 +67.85 2 +67.87 4 +67.89 1 +67.90 2 +67.91 3 +67.93 2 +67.94 1 +67.99 1 +68.00 3 +68.01 3 +68.02 3 +68.04 2 +68.05 1 +68.06 1 +68.07 1 +68.08 1 +68.09 7 +68.10 2 +68.12 1 +68.14 1 +68.15 2 +68.16 1 +68.17 1 +68.18 1 +68.21 3 +68.24 1 +68.25 3 +68.26 2 +68.28 2 +68.30 3 +68.32 1 +68.33 1 +68.35 2 +68.36 1 +68.39 1 +68.40 3 +68.42 1 +68.43 1 +68.45 2 +68.46 1 +68.48 2 +68.49 2 +68.50 1 +68.54 2 +68.57 1 +68.60 1 +68.64 1 +68.67 1 +68.68 1 +68.72 1 +68.74 1 +68.76 2 +68.79 1 +68.80 4 +68.81 2 +68.82 1 +68.83 1 +68.84 2 +68.85 1 +68.87 1 +68.88 1 +68.89 2 +68.90 2 +68.91 1 +68.92 1 +68.94 1 +68.95 2 +68.97 2 +68.98 1 +68.99 1 +69.00 1 +69.01 2 +69.02 2 +69.03 1 +69.05 1 +69.06 3 +69.11 1 +69.13 2 +69.14 3 +69.16 3 +69.17 1 +69.22 1 +69.24 1 +69.26 3 +69.27 2 +69.28 2 +69.29 3 +69.30 2 +69.32 1 +69.33 6 +69.34 1 +69.36 2 +69.37 1 +69.38 1 +69.41 2 +69.42 3 +69.43 1 +69.44 1 +69.45 2 +69.46 2 +69.47 4 +69.48 2 +69.49 1 +69.52 3 +69.53 1 +69.54 1 +69.55 2 +69.58 3 +69.60 2 +69.62 1 +69.64 1 +69.67 1 +69.68 1 +69.72 2 +69.75 2 +69.76 4 +69.78 1 +69.79 2 +69.80 2 +69.81 1 +69.82 1 +69.84 1 +69.85 4 +69.86 1 +69.87 1 +69.88 1 +69.89 1 +69.90 2 +69.91 1 +69.92 2 +69.93 1 +69.94 1 +69.96 1 +69.97 1 +69.98 3 +7.00 1 +7.02 1 +7.03 3 +7.04 2 +7.06 2 +7.08 2 +7.09 1 +7.10 2 +7.11 1 +7.12 1 +7.13 1 +7.14 2 +7.16 1 +7.17 1 +7.19 2 +7.20 1 +7.21 1 +7.22 1 +7.23 1 +7.24 1 +7.26 1 +7.27 2 +7.28 2 +7.30 1 +7.31 1 +7.33 1 +7.34 2 +7.36 4 +7.37 3 +7.38 2 +7.39 3 +7.42 1 +7.45 1 +7.46 2 +7.49 3 +7.50 1 +7.51 1 +7.52 2 +7.54 1 +7.57 2 +7.59 4 +7.61 2 +7.62 1 +7.66 1 +7.67 4 +7.71 1 +7.72 1 +7.73 1 +7.74 1 +7.76 1 +7.80 2 +7.81 1 +7.82 2 +7.83 2 +7.84 1 +7.87 1 +7.88 2 +7.89 2 +7.90 2 +7.92 3 +7.93 1 +7.94 1 +7.95 2 +7.97 3 +7.98 1 +7.99 2 +70.00 1 +70.01 3 +70.02 3 +70.06 2 +70.08 2 +70.10 2 +70.12 1 +70.13 1 +70.14 2 +70.15 1 
+70.16 1 +70.25 1 +70.27 1 +70.29 1 +70.31 1 +70.32 2 +70.36 1 +70.38 1 +70.41 1 +70.43 1 +70.45 2 +70.48 1 +70.49 1 +70.50 1 +70.51 2 +70.52 3 +70.53 1 +70.54 2 +70.57 3 +70.58 2 +70.59 1 +70.60 1 +70.62 1 +70.63 2 +70.64 2 +70.66 1 +70.67 2 +70.70 2 +70.74 1 +70.75 1 +70.76 1 +70.78 3 +70.79 1 +70.80 2 +70.81 3 +70.82 1 +70.84 1 +70.87 1 +70.88 1 +70.89 2 +70.90 1 +70.91 2 +70.92 2 +70.93 1 +70.96 1 +71.00 1 +71.01 1 +71.02 1 +71.05 1 +71.07 3 +71.08 1 +71.09 1 +71.12 1 +71.14 1 +71.15 3 +71.16 1 +71.17 1 +71.20 1 +71.21 1 +71.22 1 +71.24 1 +71.25 1 +71.27 1 +71.28 1 +71.29 1 +71.30 1 +71.33 1 +71.35 2 +71.37 1 +71.38 1 +71.39 2 +71.42 1 +71.43 1 +71.45 1 +71.48 1 +71.49 1 +71.51 2 +71.53 1 +71.54 1 +71.55 1 +71.56 1 +71.57 3 +71.59 2 +71.61 1 +71.62 2 +71.65 1 +71.66 5 +71.67 1 +71.72 2 +71.73 1 +71.75 1 +71.76 1 +71.77 1 +71.78 1 +71.79 2 +71.80 2 +71.82 2 +71.83 3 +71.84 2 +71.86 1 +71.87 3 +71.88 1 +71.89 1 +71.90 3 +71.91 1 +71.93 1 +71.94 3 +71.95 1 +71.97 1 +71.99 2 +72.00 2 +72.03 1 +72.04 4 +72.07 1 +72.09 1 +72.10 1 +72.11 1 +72.13 2 +72.14 1 +72.15 1 +72.16 3 +72.17 2 +72.19 1 +72.22 3 +72.23 1 +72.24 1 +72.30 2 +72.31 2 +72.32 1 +72.33 3 +72.36 1 +72.37 2 +72.39 3 +72.41 1 +72.43 1 +72.44 2 +72.45 1 +72.46 2 +72.47 3 +72.48 1 +72.50 2 +72.53 1 +72.54 4 +72.55 3 +72.56 1 +72.58 2 +72.59 2 +72.60 1 +72.62 2 +72.63 1 +72.64 1 +72.65 2 +72.67 2 +72.69 1 +72.70 2 +72.73 1 +72.75 1 +72.76 1 +72.79 1 +72.80 1 +72.82 2 +72.83 2 +72.84 1 +72.86 1 +72.87 2 +72.89 1 +72.90 3 +72.91 1 +72.92 1 +72.95 2 +72.96 1 +72.97 1 +72.99 3 +73.01 1 +73.02 1 +73.03 1 +73.06 2 +73.07 3 +73.08 3 +73.09 2 +73.12 1 +73.13 2 +73.16 2 +73.20 2 +73.21 1 +73.23 1 +73.24 1 +73.27 1 +73.28 1 +73.32 1 +73.35 2 +73.37 1 +73.38 1 +73.39 1 +73.41 2 +73.42 1 +73.45 2 +73.46 3 +73.47 2 +73.48 4 +73.51 2 +73.52 1 +73.54 2 +73.55 1 +73.56 1 +73.58 1 +73.59 1 +73.61 1 +73.64 3 +73.65 1 +73.66 1 +73.67 1 +73.68 1 +73.69 1 +73.70 2 +73.72 1 +73.73 1 +73.75 3 +73.76 1 +73.77 1 +73.79 2 +73.80 3 +73.84 1 +73.85 1 +73.86 1 +73.88 1 +73.89 2 +73.90 1 +73.91 1 +73.93 2 +73.94 1 +73.95 1 +73.97 1 +73.98 2 +73.99 1 +74.00 1 +74.01 1 +74.02 1 +74.06 1 +74.07 1 +74.08 2 +74.09 2 +74.10 1 +74.11 1 +74.12 1 +74.13 3 +74.14 2 +74.16 1 +74.17 2 +74.22 1 +74.23 2 +74.24 1 +74.25 3 +74.28 1 +74.29 2 +74.31 2 +74.32 2 +74.33 1 +74.36 2 +74.38 2 +74.39 1 +74.41 1 +74.42 1 +74.44 2 +74.46 1 +74.47 1 +74.48 3 +74.49 1 +74.50 1 +74.51 1 +74.52 2 +74.53 1 +74.55 1 +74.57 1 +74.58 1 +74.59 2 +74.60 4 +74.61 1 +74.63 1 +74.64 2 +74.67 1 +74.68 1 +74.69 1 +74.72 1 +74.73 4 +74.75 2 +74.76 1 +74.77 1 +74.79 1 +74.80 1 +74.81 1 +74.83 1 +74.84 1 +74.85 1 +74.89 1 +74.91 1 +74.92 1 +74.93 1 +74.95 2 +74.97 1 +74.98 1 +75.00 1 +75.02 2 +75.03 1 +75.04 4 +75.06 3 +75.07 1 +75.09 2 +75.10 3 +75.11 3 +75.14 3 +75.15 3 +75.18 2 +75.20 1 +75.21 1 +75.22 1 +75.23 1 +75.25 1 +75.26 1 +75.27 1 +75.29 1 +75.30 1 +75.32 1 +75.33 2 +75.36 1 +75.39 3 +75.40 2 +75.42 1 +75.43 1 +75.44 1 +75.45 2 +75.46 1 +75.48 2 +75.50 1 +75.51 1 +75.52 1 +75.53 2 +75.55 1 +75.56 1 +75.57 2 +75.58 1 +75.59 1 +75.61 1 +75.62 2 +75.64 2 +75.65 1 +75.66 2 +75.68 1 +75.69 1 +75.70 2 +75.71 2 +75.73 2 +75.74 1 +75.75 1 +75.76 2 +75.77 2 +75.78 2 +75.79 1 +75.80 3 +75.81 1 +75.82 1 +75.84 2 +75.85 1 +75.86 1 +75.87 2 +75.88 4 +75.90 1 +75.91 1 +75.92 1 +75.95 2 +75.97 3 +75.99 2 +76.00 2 +76.01 1 +76.02 1 +76.03 1 +76.04 1 +76.06 3 +76.07 1 +76.09 4 +76.11 1 +76.12 1 +76.13 1 +76.14 1 +76.16 2 +76.17 3 +76.18 2 +76.19 3 +76.20 4 +76.21 3 +76.24 1 +76.25 1 +76.26 1 +76.27 1 +76.28 1 
+76.29 2 +76.30 1 +76.31 3 +76.33 1 +76.34 1 +76.35 2 +76.37 1 +76.39 1 +76.40 2 +76.44 1 +76.45 1 +76.46 1 +76.49 1 +76.50 1 +76.51 2 +76.52 1 +76.53 1 +76.54 3 +76.57 2 +76.58 3 +76.60 1 +76.61 1 +76.63 2 +76.64 3 +76.65 3 +76.67 1 +76.70 1 +76.72 2 +76.73 1 +76.75 2 +76.76 1 +76.77 2 +76.79 1 +76.80 3 +76.81 1 +76.83 2 +76.87 2 +76.88 1 +76.89 1 +76.92 1 +76.93 2 +76.95 4 +76.97 1 +76.98 2 +76.99 1 +77.00 1 +77.01 1 +77.02 1 +77.03 1 +77.06 1 +77.07 3 +77.09 1 +77.10 1 +77.12 1 +77.13 2 +77.16 2 +77.17 1 +77.18 3 +77.21 2 +77.22 1 +77.23 1 +77.24 1 +77.25 1 +77.26 1 +77.27 1 +77.29 2 +77.30 1 +77.31 1 +77.32 1 +77.33 1 +77.34 1 +77.35 1 +77.38 1 +77.39 2 +77.40 3 +77.41 1 +77.43 1 +77.44 3 +77.45 1 +77.48 1 +77.51 1 +77.52 2 +77.53 1 +77.55 2 +77.56 2 +77.57 3 +77.59 1 +77.60 1 +77.61 2 +77.62 2 +77.63 2 +77.64 2 +77.66 3 +77.72 1 +77.75 5 +77.76 1 +77.77 2 +77.78 2 +77.79 1 +77.80 1 +77.83 1 +77.84 1 +77.85 1 +77.86 1 +77.87 2 +77.88 2 +77.92 2 +77.93 2 +77.95 2 +77.96 1 +77.97 1 +77.99 1 +78.01 1 +78.05 3 +78.07 2 +78.11 1 +78.13 1 +78.14 1 +78.15 1 +78.16 1 +78.17 1 +78.18 1 +78.22 2 +78.24 2 +78.25 1 +78.28 1 +78.30 1 +78.32 1 +78.33 1 +78.35 2 +78.36 3 +78.39 1 +78.40 1 +78.41 2 +78.43 1 +78.45 1 +78.46 1 +78.48 2 +78.52 1 +78.53 1 +78.55 3 +78.56 3 +78.57 1 +78.59 1 +78.60 1 +78.62 2 +78.65 1 +78.66 1 +78.69 2 +78.73 2 +78.75 3 +78.76 1 +78.78 1 +78.80 1 +78.83 2 +78.84 2 +78.85 3 +78.86 2 +78.90 1 +78.91 1 +78.92 3 +78.93 2 +78.95 1 +78.97 1 +78.98 1 +79.00 1 +79.02 4 +79.03 2 +79.04 3 +79.06 1 +79.07 2 +79.08 4 +79.09 3 +79.10 2 +79.11 1 +79.13 2 +79.14 1 +79.15 2 +79.16 2 +79.17 1 +79.20 2 +79.21 1 +79.22 1 +79.25 1 +79.26 1 +79.28 2 +79.29 1 +79.30 1 +79.32 1 +79.33 2 +79.34 1 +79.37 1 +79.38 1 +79.39 1 +79.40 2 +79.41 2 +79.42 1 +79.44 1 +79.45 2 +79.46 2 +79.48 1 +79.49 1 +79.50 1 +79.51 2 +79.53 1 +79.55 1 +79.56 1 +79.57 2 +79.58 1 +79.59 1 +79.60 3 +79.63 2 +79.64 1 +79.65 3 +79.66 1 +79.67 1 +79.69 1 +79.74 1 +79.75 1 +79.77 2 +79.80 2 +79.81 1 +79.83 1 +79.84 1 +79.85 3 +79.88 1 +79.89 1 +79.90 1 +79.91 1 +79.92 2 +79.93 2 +79.94 1 +79.95 1 +79.96 2 +79.99 1 +8.00 1 +8.01 1 +8.02 1 +8.03 1 +8.04 1 +8.06 3 +8.07 1 +8.08 1 +8.09 1 +8.12 2 +8.13 1 +8.16 1 +8.17 1 +8.22 1 +8.25 1 +8.26 1 +8.27 1 +8.28 2 +8.30 2 +8.31 1 +8.33 1 +8.34 1 +8.35 2 +8.38 2 +8.39 1 +8.40 1 +8.43 1 +8.44 1 +8.46 1 +8.48 4 +8.49 1 +8.52 1 +8.53 1 +8.54 1 +8.55 2 +8.57 3 +8.58 1 +8.59 1 +8.60 1 +8.62 1 +8.63 2 +8.64 3 +8.65 4 +8.66 1 +8.67 1 +8.68 1 +8.69 3 +8.71 1 +8.72 1 +8.73 2 +8.74 1 +8.76 3 +8.77 2 +8.78 2 +8.80 1 +8.84 2 +8.85 2 +8.86 3 +8.88 1 +8.89 2 +8.90 3 +8.91 3 +8.93 1 +8.94 1 +8.95 2 +8.96 1 +8.97 1 +8.98 2 +8.99 1 +80.00 1 +80.01 1 +80.02 1 +80.05 2 +80.08 2 +80.09 5 +80.10 1 +80.11 3 +80.12 1 +80.13 2 +80.14 1 +80.15 1 +80.21 1 +80.24 2 +80.26 1 +80.27 1 +80.30 1 +80.31 2 +80.36 1 +80.38 1 +80.40 2 +80.42 1 +80.43 1 +80.44 1 +80.45 2 +80.47 1 +80.48 2 +80.50 1 +80.51 2 +80.52 1 +80.54 1 +80.55 2 +80.56 1 +80.59 1 +80.61 2 +80.62 1 +80.63 1 +80.64 1 +80.68 1 +80.69 3 +80.71 1 +80.72 2 +80.73 2 +80.76 2 +80.77 2 +80.80 1 +80.81 4 +80.82 3 +80.83 3 +80.84 1 +80.85 1 +80.88 1 +80.90 1 +80.93 1 +80.94 1 +80.95 1 +80.96 3 +80.97 2 +80.98 1 +80.99 2 +81.02 2 +81.03 3 +81.04 3 +81.06 1 +81.10 1 +81.11 1 +81.13 2 +81.14 1 +81.15 1 +81.16 2 +81.18 1 +81.19 1 +81.20 1 +81.21 2 +81.22 1 +81.23 3 +81.24 1 +81.26 1 +81.27 2 +81.28 3 +81.29 1 +81.30 1 +81.35 3 +81.36 1 +81.37 3 +81.38 1 +81.39 1 +81.40 1 +81.41 1 +81.42 2 +81.43 1 +81.44 3 +81.49 1 +81.50 1 +81.51 3 +81.53 1 +81.54 2 +81.57 1 
+81.58 2 +81.59 1 +81.60 1 +81.61 1 +81.66 1 +81.67 1 +81.68 1 +81.69 1 +81.70 2 +81.71 1 +81.72 1 +81.74 2 +81.76 1 +81.78 1 +81.80 1 +81.81 1 +81.85 2 +81.87 2 +81.88 1 +81.89 1 +81.90 1 +81.91 1 +81.93 2 +81.95 2 +81.96 3 +81.98 2 +81.99 2 +82.00 1 +82.02 2 +82.03 1 +82.04 2 +82.05 1 +82.06 2 +82.07 1 +82.08 2 +82.10 2 +82.12 2 +82.16 1 +82.19 1 +82.20 2 +82.21 2 +82.23 1 +82.24 2 +82.25 2 +82.28 2 +82.29 2 +82.31 1 +82.32 1 +82.33 2 +82.34 1 +82.36 2 +82.37 1 +82.39 4 +82.40 1 +82.41 1 +82.43 1 +82.44 1 +82.46 1 +82.47 1 +82.48 3 +82.49 1 +82.50 1 +82.52 1 +82.53 4 +82.54 1 +82.56 1 +82.57 1 +82.59 1 +82.60 1 +82.61 2 +82.62 1 +82.64 2 +82.67 2 +82.68 1 +82.69 1 +82.70 3 +82.71 2 +82.72 1 +82.73 1 +82.74 2 +82.75 1 +82.78 2 +82.79 3 +82.83 3 +82.85 1 +82.87 1 +82.89 1 +82.90 2 +82.91 1 +82.92 1 +82.93 1 +82.94 3 +82.95 1 +82.96 2 +82.98 2 +82.99 1 +83.00 2 +83.01 2 +83.03 3 +83.04 1 +83.05 1 +83.06 2 +83.08 1 +83.09 1 +83.10 3 +83.12 1 +83.13 2 +83.15 2 +83.16 1 +83.18 1 +83.19 1 +83.20 1 +83.21 5 +83.22 1 +83.23 1 +83.24 1 +83.26 3 +83.27 2 +83.29 2 +83.31 1 +83.33 2 +83.36 1 +83.37 2 +83.38 1 +83.39 1 +83.40 1 +83.42 2 +83.43 1 +83.44 2 +83.46 2 +83.47 1 +83.48 3 +83.49 1 +83.50 2 +83.51 1 +83.52 2 +83.53 1 +83.54 1 +83.60 1 +83.61 1 +83.62 1 +83.63 2 +83.64 3 +83.65 1 +83.66 2 +83.67 1 +83.69 1 +83.71 1 +83.73 1 +83.75 1 +83.76 1 +83.78 1 +83.79 2 +83.80 2 +83.81 1 +83.82 3 +83.83 1 +83.85 1 +83.86 2 +83.89 2 +83.92 2 +83.93 1 +83.94 2 +83.95 4 +83.96 1 +84.01 2 +84.02 2 +84.03 2 +84.05 1 +84.07 2 +84.08 2 +84.10 2 +84.12 2 +84.13 1 +84.17 3 +84.18 1 +84.20 2 +84.23 2 +84.25 1 +84.26 2 +84.27 1 +84.28 1 +84.29 2 +84.32 1 +84.35 3 +84.37 3 +84.38 1 +84.40 3 +84.41 2 +84.42 3 +84.43 1 +84.44 2 +84.45 1 +84.46 1 +84.47 1 +84.50 1 +84.55 1 +84.56 1 +84.57 1 +84.58 1 +84.60 3 +84.61 2 +84.62 1 +84.63 2 +84.64 1 +84.65 1 +84.69 2 +84.70 1 +84.71 1 +84.72 1 +84.73 1 +84.75 1 +84.76 1 +84.78 1 +84.79 1 +84.80 2 +84.81 1 +84.83 2 +84.86 1 +84.87 1 +84.88 2 +84.90 3 +84.91 1 +84.92 1 +84.93 2 +84.95 2 +84.96 2 +84.97 3 +84.99 1 +85.00 1 +85.01 3 +85.02 1 +85.08 1 +85.09 2 +85.13 1 +85.16 1 +85.17 1 +85.19 1 +85.21 1 +85.22 2 +85.25 1 +85.26 5 +85.27 1 +85.28 1 +85.29 1 +85.31 1 +85.34 2 +85.36 1 +85.37 1 +85.38 2 +85.39 2 +85.40 3 +85.41 1 +85.43 3 +85.44 2 +85.48 1 +85.49 1 +85.50 2 +85.52 3 +85.54 1 +85.55 3 +85.58 1 +85.59 1 +85.60 1 +85.61 1 +85.62 1 +85.63 1 +85.64 2 +85.65 4 +85.67 1 +85.68 1 +85.70 2 +85.73 1 +85.74 1 +85.75 1 +85.76 1 +85.79 1 +85.80 1 +85.82 2 +85.83 2 +85.85 1 +85.86 1 +85.87 1 +85.88 2 +85.89 1 +85.90 1 +85.91 1 +85.92 1 +85.93 1 +85.94 2 +85.97 1 +85.98 1 +85.99 1 +86.02 2 +86.04 1 +86.10 1 +86.11 1 +86.12 2 +86.13 1 +86.14 1 +86.15 2 +86.18 1 +86.19 3 +86.20 1 +86.21 1 +86.23 2 +86.24 1 +86.25 1 +86.26 2 +86.27 2 +86.28 1 +86.29 3 +86.33 1 +86.34 2 +86.38 2 +86.39 1 +86.40 1 +86.41 2 +86.43 2 +86.44 1 +86.49 2 +86.50 1 +86.51 3 +86.52 2 +86.53 1 +86.54 1 +86.58 1 +86.59 1 +86.60 3 +86.61 2 +86.64 1 +86.65 2 +86.66 2 +86.69 1 +86.70 3 +86.71 2 +86.72 1 +86.73 1 +86.75 3 +86.76 1 +86.78 5 +86.79 2 +86.82 1 +86.84 2 +86.85 6 +86.86 5 +86.87 1 +86.90 1 +86.92 1 +86.93 1 +86.94 1 +86.95 1 +86.97 1 +87.00 1 +87.01 1 +87.02 1 +87.04 1 +87.05 2 +87.06 1 +87.07 1 +87.10 1 +87.11 1 +87.12 1 +87.16 1 +87.17 2 +87.18 1 +87.19 2 +87.21 2 +87.24 1 +87.28 1 +87.29 3 +87.30 1 +87.31 1 +87.32 2 +87.34 1 +87.36 1 +87.37 1 +87.38 2 +87.41 3 +87.42 1 +87.43 2 +87.44 1 +87.47 2 +87.48 1 +87.49 1 +87.50 2 +87.52 5 +87.56 1 +87.57 2 +87.58 2 +87.59 1 +87.60 2 +87.61 1 +87.62 1 +87.63 1 
+87.65 2 +87.66 1 +87.67 3 +87.69 1 +87.70 1 +87.71 2 +87.72 1 +87.73 1 +87.75 1 +87.76 2 +87.78 2 +87.79 1 +87.80 1 +87.83 1 +87.84 2 +87.86 1 +87.89 2 +87.90 1 +87.91 1 +87.92 1 +87.93 1 +87.95 1 +87.96 2 +87.97 3 +87.99 1 +88.01 1 +88.03 1 +88.04 1 +88.05 2 +88.06 2 +88.08 2 +88.09 2 +88.10 3 +88.11 4 +88.12 1 +88.13 1 +88.14 1 +88.16 1 +88.18 1 +88.19 2 +88.20 1 +88.23 3 +88.24 1 +88.25 1 +88.29 2 +88.30 1 +88.31 2 +88.34 1 +88.35 1 +88.38 3 +88.39 2 +88.42 1 +88.45 2 +88.46 3 +88.50 2 +88.51 2 +88.52 1 +88.55 2 +88.56 1 +88.58 1 +88.60 3 +88.61 1 +88.64 1 +88.65 2 +88.67 1 +88.68 1 +88.69 3 +88.73 1 +88.74 2 +88.75 3 +88.78 2 +88.79 2 +88.80 3 +88.81 1 +88.83 3 +88.87 1 +88.88 1 +88.90 1 +88.92 1 +88.93 1 +88.94 2 +88.95 1 +88.96 1 +88.99 2 +89.00 3 +89.02 1 +89.05 1 +89.06 1 +89.08 1 +89.09 1 +89.11 1 +89.12 1 +89.14 5 +89.16 2 +89.18 2 +89.20 1 +89.23 3 +89.24 1 +89.25 2 +89.26 1 +89.28 1 +89.29 1 +89.31 2 +89.32 3 +89.34 1 +89.35 3 +89.38 1 +89.39 1 +89.40 4 +89.41 1 +89.46 1 +89.49 1 +89.50 2 +89.51 1 +89.52 3 +89.53 2 +89.54 3 +89.55 1 +89.58 2 +89.60 4 +89.61 1 +89.62 3 +89.64 1 +89.66 2 +89.67 3 +89.71 1 +89.73 1 +89.75 1 +89.77 3 +89.79 2 +89.80 1 +89.81 1 +89.84 2 +89.86 1 +89.87 3 +89.89 1 +89.92 1 +89.93 1 +89.94 1 +89.95 2 +89.97 2 +89.98 1 +89.99 1 +9.00 1 +9.01 2 +9.02 5 +9.04 1 +9.05 2 +9.06 1 +9.07 2 +9.08 1 +9.09 1 +9.10 2 +9.11 1 +9.12 1 +9.13 1 +9.14 1 +9.15 1 +9.17 1 +9.18 2 +9.20 1 +9.24 2 +9.25 1 +9.29 2 +9.33 1 +9.35 2 +9.37 2 +9.39 1 +9.42 1 +9.43 1 +9.44 1 +9.45 1 +9.46 1 +9.47 3 +9.49 2 +9.50 1 +9.52 1 +9.53 1 +9.55 1 +9.56 1 +9.58 2 +9.59 2 +9.60 1 +9.63 1 +9.64 1 +9.67 1 +9.71 1 +9.73 1 +9.74 1 +9.76 1 +9.77 1 +9.78 1 +9.82 2 +9.84 2 +9.85 1 +9.86 1 +9.87 1 +9.88 2 +9.89 2 +9.91 1 +9.92 3 +9.93 1 +9.94 1 +9.95 1 +9.96 1 +9.97 1 +9.98 2 +9.99 2 +90.01 3 +90.02 3 +90.05 1 +90.08 2 +90.09 2 +90.13 1 +90.15 2 +90.16 2 +90.17 1 +90.19 2 +90.21 1 +90.22 1 +90.24 1 +90.26 2 +90.27 1 +90.28 1 +90.29 1 +90.30 2 +90.31 2 +90.33 1 +90.34 2 +90.36 3 +90.37 2 +90.38 5 +90.39 2 +90.40 2 +90.42 1 +90.43 1 +90.45 2 +90.46 1 +90.47 1 +90.50 1 +90.51 2 +90.53 1 +90.55 2 +90.56 3 +90.58 1 +90.59 1 +90.62 1 +90.64 1 +90.65 1 +90.66 2 +90.68 1 +90.69 2 +90.70 2 +90.71 1 +90.72 1 +90.73 1 +90.74 2 +90.75 1 +90.77 1 +90.78 1 +90.79 1 +90.82 1 +90.83 2 +90.84 1 +90.85 1 +90.87 1 +90.88 2 +90.89 2 +90.90 2 +90.91 2 +90.93 1 +90.95 2 +90.98 1 +91.00 2 +91.03 3 +91.05 1 +91.07 1 +91.08 1 +91.09 3 +91.10 1 +91.11 2 +91.12 2 +91.13 1 +91.15 1 +91.16 1 +91.19 2 +91.22 1 +91.28 1 +91.30 1 +91.31 1 +91.33 2 +91.34 1 +91.35 2 +91.37 3 +91.38 2 +91.40 1 +91.41 1 +91.42 1 +91.43 2 +91.44 1 +91.45 1 +91.46 2 +91.47 1 +91.49 1 +91.51 4 +91.52 1 +91.54 1 +91.56 3 +91.57 1 +91.58 1 +91.59 1 +91.60 1 +91.66 1 +91.67 1 +91.68 1 +91.69 1 +91.70 1 +91.71 1 +91.72 1 +91.75 1 +91.76 1 +91.77 1 +91.80 1 +91.81 1 +91.85 2 +91.87 1 +91.89 1 +91.90 2 +91.93 1 +91.94 1 +91.96 1 +91.98 1 +92.01 2 +92.03 1 +92.05 1 +92.06 1 +92.08 1 +92.13 1 +92.14 1 +92.16 1 +92.17 1 +92.18 1 +92.21 1 +92.22 3 +92.23 1 +92.24 2 +92.27 1 +92.28 1 +92.29 1 +92.30 1 +92.31 1 +92.32 1 +92.34 1 +92.35 1 +92.36 1 +92.38 1 +92.39 1 +92.40 1 +92.41 1 +92.42 1 +92.44 1 +92.47 2 +92.48 1 +92.50 2 +92.52 1 +92.53 1 +92.56 2 +92.57 1 +92.59 2 +92.60 1 +92.61 2 +92.63 2 +92.64 1 +92.67 1 +92.71 2 +92.72 2 +92.73 1 +92.74 3 +92.75 1 +92.78 1 +92.79 2 +92.81 2 +92.82 2 +92.86 1 +92.88 1 +92.89 1 +92.90 1 +92.91 1 +92.92 2 +92.93 2 +92.94 2 +92.95 3 +92.97 3 +92.99 1 +93.00 1 +93.01 3 +93.03 3 +93.05 3 +93.08 1 +93.09 2 +93.10 1 +93.11 2 
+93.13 1 +93.16 2 +93.17 2 +93.18 1 +93.19 1 +93.21 2 +93.22 1 +93.23 1 +93.24 3 +93.26 1 +93.27 1 +93.28 1 +93.29 2 +93.30 2 +93.31 1 +93.32 1 +93.35 1 +93.36 1 +93.38 1 +93.39 1 +93.40 1 +93.41 3 +93.43 2 +93.45 1 +93.46 2 +93.48 1 +93.49 3 +93.50 1 +93.52 2 +93.54 1 +93.55 1 +93.57 2 +93.58 1 +93.60 1 +93.62 2 +93.63 1 +93.64 1 +93.65 2 +93.66 2 +93.68 1 +93.69 1 +93.70 2 +93.71 1 +93.73 2 +93.74 1 +93.77 2 +93.78 1 +93.79 3 +93.81 1 +93.83 2 +93.84 2 +93.85 2 +93.86 1 +93.87 1 +93.89 1 +93.90 1 +93.93 2 +93.95 3 +93.96 1 +93.97 3 +93.98 2 +93.99 2 +94.01 1 +94.02 1 +94.03 4 +94.04 1 +94.05 1 +94.06 1 +94.09 2 +94.11 1 +94.12 1 +94.13 3 +94.14 1 +94.16 2 +94.17 1 +94.23 2 +94.25 1 +94.26 1 +94.30 4 +94.32 1 +94.33 1 +94.34 2 +94.35 2 +94.39 2 +94.42 1 +94.43 1 +94.44 3 +94.45 1 +94.46 2 +94.47 6 +94.48 1 +94.49 1 +94.50 1 +94.51 1 +94.52 1 +94.53 1 +94.56 1 +94.58 2 +94.59 2 +94.60 3 +94.61 2 +94.62 1 +94.63 4 +94.68 1 +94.69 2 +94.71 1 +94.73 1 +94.74 1 +94.75 1 +94.77 2 +94.78 1 +94.80 1 +94.82 2 +94.83 2 +94.85 1 +94.86 2 +94.87 1 +94.88 2 +94.89 1 +94.90 2 +94.91 3 +94.92 1 +94.95 1 +94.97 1 +94.99 1 +95.02 1 +95.05 1 +95.06 2 +95.07 3 +95.08 1 +95.09 1 +95.10 1 +95.11 2 +95.12 4 +95.13 2 +95.14 2 +95.15 1 +95.16 1 +95.20 2 +95.21 1 +95.23 1 +95.24 1 +95.26 1 +95.28 1 +95.32 1 +95.33 1 +95.34 2 +95.37 1 +95.38 1 +95.39 1 +95.40 2 +95.41 1 +95.43 1 +95.45 3 +95.46 4 +95.48 2 +95.51 1 +95.52 1 +95.54 2 +95.56 2 +95.57 1 +95.58 2 +95.59 1 +95.61 3 +95.62 1 +95.63 2 +95.64 1 +95.65 1 +95.67 3 +95.68 1 +95.69 4 +95.70 1 +95.71 2 +95.72 1 +95.74 1 +95.79 1 +95.80 2 +95.81 3 +95.83 2 +95.84 1 +95.87 3 +95.88 1 +95.89 3 +95.90 2 +95.93 1 +95.94 3 +95.99 1 +96.01 1 +96.02 1 +96.04 1 +96.05 2 +96.06 2 +96.07 2 +96.08 1 +96.09 1 +96.10 2 +96.13 2 +96.20 2 +96.21 1 +96.22 1 +96.23 1 +96.24 1 +96.25 1 +96.26 1 +96.28 2 +96.30 2 +96.31 1 +96.32 1 +96.34 2 +96.35 2 +96.36 1 +96.38 1 +96.39 2 +96.42 1 +96.43 1 +96.44 1 +96.45 5 +96.46 1 +96.47 1 +96.48 2 +96.50 1 +96.51 1 +96.52 1 +96.53 2 +96.54 1 +96.55 1 +96.57 1 +96.58 1 +96.59 1 +96.60 4 +96.61 1 +96.62 2 +96.63 1 +96.64 2 +96.66 2 +96.68 2 +96.69 2 +96.71 2 +96.72 1 +96.74 2 +96.76 4 +96.77 1 +96.79 1 +96.80 2 +96.81 2 +96.82 1 +96.83 1 +96.85 2 +96.87 1 +96.89 3 +96.90 2 +96.92 1 +96.95 2 +96.96 1 +96.97 1 +96.98 2 +96.99 1 +97.01 1 +97.03 2 +97.07 1 +97.09 1 +97.10 2 +97.11 3 +97.15 2 +97.16 1 +97.18 2 +97.20 2 +97.21 1 +97.22 2 +97.24 1 +97.25 2 +97.26 1 +97.28 2 +97.29 1 +97.31 1 +97.32 2 +97.34 1 +97.35 1 +97.37 2 +97.38 2 +97.39 1 +97.41 2 +97.42 4 +97.43 1 +97.48 2 +97.49 2 +97.50 2 +97.52 2 +97.53 1 +97.55 1 +97.56 4 +97.57 1 +97.59 1 +97.60 1 +97.62 1 +97.64 1 +97.65 1 +97.66 1 +97.68 1 +97.69 2 +97.70 2 +97.72 1 +97.74 3 +97.75 1 +97.76 1 +97.77 1 +97.78 1 +97.81 2 +97.82 1 +97.84 1 +97.85 1 +97.86 2 +97.87 1 +97.88 1 +97.91 1 +97.94 2 +97.96 1 +97.98 2 +97.99 2 +98.00 1 +98.01 1 +98.03 1 +98.04 1 +98.06 3 +98.07 2 +98.08 1 +98.10 3 +98.11 3 +98.12 1 +98.13 3 +98.15 3 +98.16 1 +98.18 1 +98.20 1 +98.22 1 +98.23 1 +98.24 2 +98.25 3 +98.26 2 +98.28 2 +98.29 2 +98.32 1 +98.33 1 +98.35 2 +98.36 2 +98.38 2 +98.39 1 +98.41 1 +98.42 3 +98.43 1 +98.44 1 +98.46 1 +98.48 2 +98.50 1 +98.51 3 +98.52 1 +98.53 1 +98.54 1 +98.56 1 +98.58 1 +98.60 1 +98.63 1 +98.64 2 +98.67 3 +98.68 2 +98.70 1 +98.72 1 +98.73 1 +98.76 2 +98.77 1 +98.79 1 +98.80 2 +98.82 1 +98.83 1 +98.84 1 +98.86 2 +98.87 2 +98.88 1 +98.89 1 +98.90 5 +98.91 1 +98.93 1 +98.97 1 +98.99 2 +99.00 3 +99.01 1 +99.02 1 +99.03 1 +99.04 1 +99.05 3 +99.06 1 +99.08 3 +99.10 1 +99.12 1 +99.13 1 
+99.14 1 +99.16 1 +99.18 2 +99.19 2 +99.20 1 +99.21 1 +99.22 1 +99.23 1 +99.24 2 +99.27 2 +99.28 3 +99.29 1 +99.31 3 +99.32 1 +99.33 3 +99.34 2 +99.35 2 +99.37 3 +99.38 1 +99.40 1 +99.41 1 +99.42 3 +99.44 1 +99.46 2 +99.48 1 +99.50 4 +99.51 1 +99.52 1 +99.53 2 +99.54 1 +99.55 3 +99.56 2 +99.57 2 +99.58 3 +99.60 1 +99.62 1 +99.63 2 +99.64 2 +99.66 1 +99.67 1 +99.69 1 +99.76 2 +99.77 1 +99.78 1 +99.79 2 +99.80 1 +99.82 2 +99.84 1 +99.85 1 +99.86 3 +99.87 3 +99.88 1 +99.89 1 +99.90 2 +99.92 1 +99.94 2 +99.95 1 +99.96 1 +99.98 1 +99.99 1
+PREHOOK: query: explain vectorization expression
+select `dec`, count(bin) from over10k group by `dec`
+PREHOOK: type: QUERY
+POSTHOOK: query: explain vectorization expression
+select `dec`, count(bin) from over10k group by `dec`
+POSTHOOK: type: QUERY
+PLAN VECTORIZATION:
+  enabled: true
+  enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: over10k
+            Statistics: Num rows: 1 Data size: 10175440 Basic stats: COMPLETE Column stats: NONE
+            TableScan Vectorization:
+                native: true
+            Select Operator
+              expressions: dec (type: decimal(4,2)), bin (type: binary)
+              outputColumnNames: dec, bin
+              Select Vectorization:
+                  className: VectorSelectOperator
+                  native: true
+                  projectedOutputColumnNums: [9, 10]
+              Statistics: Num rows: 1 Data size: 10175440 Basic stats: COMPLETE Column stats: NONE
+              Group By Operator
+                aggregations: count(bin)
+                Group By Vectorization:
+                    aggregators: VectorUDAFCount(col 10:binary) -> bigint
+                    className: VectorGroupByOperator
+                    groupByMode: HASH
+                    keyExpressions: ConvertDecimal64ToDecimal(col 9:decimal(4,2)/DECIMAL_64) -> 12:decimal(4,2)
+                    native: false
+                    nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+                    nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false
+                    vectorProcessingMode: HASH
+                    projectedOutputColumnNums: [0]
+                keys: dec (type: decimal(4,2))
+                mode: hash
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 1 Data size: 10175440 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  key expressions: _col0 (type: decimal(4,2))
+                  sort order: +
+                  Map-reduce partition columns: _col0 (type: decimal(4,2))
+                  Reduce Sink Vectorization:
+                      className: VectorReduceSinkOperator
+                      native: false
+                      nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
+                      nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false
+                  Statistics: Num rows: 1 Data size: 10175440 Basic stats: COMPLETE Column stats: NONE
+                  value expressions: _col1 (type: bigint)
+      Execution mode: vectorized
+      Map Vectorization:
+          enabled: true
+          enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
+          inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+          allNative: false
+          usesVectorUDFAdaptor: false
+          vectorized: true
+      Reduce Vectorization:
+          enabled: false
+          enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true
+          enableConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false
+      Reduce Operator Tree:
+        Group By Operator
+          aggregations: count(VALUE._col0)
+          keys: KEY._col0 (type: decimal(4,2))
+          mode: mergepartial
+          outputColumnNames: _col0, _col1
+          Statistics: Num rows: 1 Data size: 10175440 Basic stats: COMPLETE Column stats: NONE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 1 Data size: 10175440 Basic stats: COMPLETE Column stats: NONE
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select `dec`, count(bin) from over10k group by `dec`
+PREHOOK: type: QUERY
+PREHOOK: Input: default@over10k
+#### A masked pattern was here ####
+POSTHOOK: query: select `dec`, count(bin) from over10k group by `dec`
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@over10k
+#### A masked pattern was here ####
+0.01 2 +0.02 1 +0.03 2 +0.04 1 +0.05 1 +0.06 3 +0.07 1 +0.08 3 +0.10 1 +0.11 1 +0.15 1 +0.17 1 +0.21 2 +0.22 1 +0.23 2 +0.24 1 +0.25 2 +0.26 1 +0.27 1 +0.29 1 +0.30 2 +0.31 3 +0.32 1 +0.33 1 +0.34 3 +0.35 2 +0.36 1 +0.37 2 +0.38 3 +0.39 1 +0.40 2 +0.42 1 +0.45 2 +0.46 3 +0.47 1 +0.50 1 +0.51 1 +0.55 1 +0.56 4 +0.57 1 +0.58 1 +0.64 3 +0.66 1 +0.67 1 +0.68 1 +0.70 1 +0.72 1 +0.73 1 +0.75 1 +0.77 1 +0.79 1 +0.80 2 +0.81 1 +0.83 1 +0.84 1 +0.85 1 +0.86 1 +0.87 1 +0.88 1 +0.89 1 +0.90 1 +0.91 2 +0.92 2 +0.93 1 +0.94 1 +0.96 1 +0.97 4 +0.98 2 +0.99 2 +1.00 2 +1.01 2 +1.04 1 +1.05 1 +1.07 2 +1.10 2 +1.11 1 +1.12 1 +1.13 1 +1.15 1 +1.16 1 +1.17 1 +1.19 1 +1.20 2 +1.22 2 +1.23 2 +1.24 2 +1.25 3 +1.27 1 +1.29 3 +1.31 1 +1.32 1 +1.33 1 +1.34 2 +1.35 2 +1.36 2 +1.37 2 +1.38 2 +1.39 1 +1.40 2 +1.42 2 +1.43 3 +1.45 2 +1.46 1 +1.48 1 +1.49 3 +1.50 2 +1.52 1 +1.55 2 +1.56 2 +1.59 1 +1.62 1 +1.65 1 +1.66 1 +1.67 1 +1.68 2 +1.69 1 +1.70 1 +1.72 4 +1.74 1 +1.75 2 +1.76 2 +1.77 1 +1.78 1 +1.79 1 +1.81 2 +1.82 1 +1.85 1 +1.86 1 +1.89 1 +1.94 1 +1.97 3 +1.99 1 +10.00 2 +10.03 2 +10.04 1 +10.06 1 +10.07 1 +10.08 2 +10.11 3 +10.12 1 +10.14 2 +10.15 2 +10.16 4 +10.20 2 +10.22 1 +10.28 1 +10.29 1 +10.30 1 +10.32 4 +10.33 1 +10.34 1 +10.35 3 +10.36 2 +10.37 1 +10.38 1 +10.39 1 +10.40 3 +10.41 1 +10.42 2 +10.43 1 +10.44 1 +10.45 1 +10.47 2 +10.48 2 +10.50 1 +10.51 1 +10.52 4 +10.54 1 +10.56 2 +10.59 2 +10.60 1 +10.62 1 +10.63 3 +10.65 1 +10.66 1 +10.67 2 +10.69 1 +10.70 2 +10.71 1 +10.72 2 +10.74 3 +10.75 1 +10.76 2 +10.77 2 +10.78 3 +10.79 1 +10.80 1 +10.81 1 +10.82 1 +10.83 1 +10.86 4 +10.88 3 +10.90 1 +10.91 2 +10.93 2 +10.94 2 +10.95 1 +10.98 1 +10.99 2 +11.00 1 +11.02 2 +11.04 1 +11.06 2 +11.10 1 +11.11 1 +11.12 2 +11.14 1 +11.15 3 +11.16 3 +11.17 1 +11.19 1 +11.20 4 +11.21 3 +11.23 2 +11.24 2 +11.25 1 +11.26 1 +11.27 1 +11.28 2 +11.29 1 +11.31 2 +11.32 2 +11.34 2 +11.36 1 +11.37 6 +11.38 2 +11.40 3 +11.41 1 +11.43 2 +11.44 1 +11.45 2 +11.46 1 +11.54 1 +11.56 2 +11.58 1 +11.61 1 +11.62 2 +11.63 2 +11.64 1 +11.65 3 +11.66 2 +11.67 1 +11.69 4 +11.70 2 +11.71 2 +11.73 1 +11.74 1 +11.76 1 +11.77 1 +11.78 3 +11.80 1 +11.81 3 +11.82 1 +11.83 1 +11.86 1 +11.87 5 +11.90 2 +11.91 2 +11.93 3 +11.95 1 +11.99 2 +12.03 2 +12.04 1 +12.05 1 +12.07 3 +12.08 2 +12.10 1 +12.11 2 +12.12 1 +12.13 1 +12.14 1 +12.15 1 +12.18 1 +12.22 1 +12.23 1 +12.26 1 +12.27 1 +12.30 2 +12.31 1 +12.34 1 +12.36 2 +12.38 1 +12.39 1 +12.40 1 +12.41 1 +12.43 1 +12.44 1 +12.45 1 +12.47 1 +12.48 2 +12.50 1 +12.51 1 +12.53 2 +12.54 1 +12.55 2 +12.56 1 +12.58 1 +12.59 2 +12.60 1 +12.61 3 +12.63 1 +12.65 2 +12.66 1
+12.67 3 +12.71 1 +12.72 3 +12.73 1 +12.75 1 +12.76 3 +12.77 3 +12.78 2 +12.79 1 +12.81 1 +12.83 1 +12.86 1 +12.87 1 +12.92 5 +12.95 1 +12.98 1 +12.99 2 +13.01 1 +13.03 1 +13.04 4 +13.05 1 +13.06 2 +13.07 2 +13.08 2 +13.09 2 +13.10 2 +13.11 1 +13.12 1 +13.13 1 +13.15 2 +13.16 2 +13.18 1 +13.19 1 +13.20 2 +13.21 1 +13.23 1 +13.25 2 +13.26 1 +13.27 1 +13.28 1 +13.29 1 +13.30 1 +13.32 2 +13.33 1 +13.34 1 +13.35 1 +13.36 1 +13.38 2 +13.40 1 +13.41 1 +13.43 1 +13.44 1 +13.45 1 +13.47 1 +13.49 2 +13.51 1 +13.56 2 +13.58 1 +13.59 1 +13.60 1 +13.61 1 +13.68 1 +13.70 1 +13.71 1 +13.72 1 +13.75 1 +13.77 1 +13.78 2 +13.80 1 +13.81 3 +13.82 1 +13.85 1 +13.86 2 +13.88 1 +13.89 2 +13.90 1 +13.91 2 +13.92 2 +13.93 1 +13.94 2 +13.95 1 +13.96 2 +13.97 3 +14.00 2 +14.01 3 +14.03 1 +14.04 2 +14.07 1 +14.08 1 +14.09 2 +14.10 2 +14.12 2 +14.14 3 +14.16 2 +14.17 1 +14.18 1 +14.19 1 +14.21 2 +14.23 2 +14.24 2 +14.26 1 +14.27 1 +14.28 3 +14.32 3 +14.33 2 +14.35 5 +14.36 1 +14.38 1 +14.39 2 +14.40 3 +14.41 1 +14.42 2 +14.46 1 +14.47 1 +14.48 1 +14.49 2 +14.52 1 +14.54 1 +14.55 3 +14.56 1 +14.57 1 +14.58 1 +14.59 1 +14.63 1 +14.64 1 +14.65 1 +14.66 2 +14.68 2 +14.69 4 +14.71 1 +14.73 1 +14.74 2 +14.75 1 +14.76 1 +14.78 2 +14.80 1 +14.81 1 +14.83 2 +14.85 1 +14.88 2 +14.89 1 +14.91 3 +14.92 1 +14.94 3 +14.96 1 +15.01 1 +15.02 1 +15.07 1 +15.09 1 +15.13 1 +15.14 2 +15.15 1 +15.16 1 +15.17 1 +15.19 2 +15.20 2 +15.22 5 +15.23 2 +15.26 2 +15.27 1 +15.28 2 +15.29 1 +15.30 2 +15.31 1 +15.32 1 +15.35 1 +15.36 1 +15.37 1 +15.38 2 +15.40 1 +15.42 3 +15.43 1 +15.46 1 +15.47 2 +15.50 1 +15.52 1 +15.54 3 +15.55 2 +15.57 1 +15.58 1 +15.60 3 +15.63 1 +15.65 2 +15.67 2 +15.69 2 +15.70 4 +15.75 1 +15.76 1 +15.79 1 +15.80 1 +15.81 2 +15.82 2 +15.83 2 +15.85 1 +15.87 2 +15.88 1 +15.89 2 +15.90 2 +15.96 1 +15.98 2 +15.99 2 +16.00 2 +16.01 1 +16.03 1 +16.05 2 +16.06 3 +16.09 1 +16.10 1 +16.11 1 +16.13 1 +16.14 1 +16.15 3 +16.17 2 +16.18 2 +16.19 1 +16.21 1 +16.22 1 +16.23 1 +16.24 1 +16.25 1 +16.26 3 +16.27 3 +16.29 1 +16.32 1 +16.34 1 +16.35 1 +16.38 1 +16.39 3 +16.40 1 +16.41 1 +16.43 1 +16.48 1 +16.49 1 +16.51 1 +16.53 1 +16.54 1 +16.55 1 +16.57 2 +16.58 1 +16.59 3 +16.60 2 +16.61 1 +16.63 2 +16.66 1 +16.67 1 +16.68 2 +16.70 1 +16.72 1 +16.74 1 +16.75 1 +16.76 1 +16.77 1 +16.79 1 +16.81 1 +16.84 1 +16.85 2 +16.86 2 +16.87 5 +16.89 2 +16.91 1 +16.92 1 +16.93 2 +16.94 1 +16.95 1 +17.00 1 +17.01 1 +17.02 1 +17.03 1 +17.05 2 +17.06 1 +17.07 2 +17.08 1 +17.09 4 +17.12 1 +17.13 2 +17.15 1 +17.17 3 +17.19 2 +17.20 1 +17.21 1 +17.23 3 +17.25 1 +17.26 1 +17.27 3 +17.29 2 +17.31 1 +17.33 1 +17.35 1 +17.38 1 +17.39 1 +17.41 1 +17.42 2 +17.45 1 +17.47 1 +17.48 1 +17.49 1 +17.50 1 +17.52 2 +17.53 1 +17.57 1 +17.58 2 +17.59 1 +17.60 1 +17.61 1 +17.62 1 +17.64 2 +17.65 1 +17.66 1 +17.67 1 +17.68 4 +17.71 1 +17.72 1 +17.73 1 +17.74 1 +17.75 2 +17.77 2 +17.78 1 +17.79 2 +17.83 1 +17.84 1 +17.86 1 +17.87 1 +17.89 1 +17.92 2 +17.93 1 +17.95 1 +17.98 1 +18.00 2 +18.01 1 +18.02 3 +18.03 2 +18.04 1 +18.05 1 +18.06 1 +18.08 4 +18.10 1 +18.11 3 +18.12 4 +18.13 1 +18.14 5 +18.15 2 +18.16 1 +18.17 1 +18.18 1 +18.19 1 +18.20 3 +18.21 3 +18.23 2 +18.24 1 +18.25 1 +18.27 1 +18.28 2 +18.30 2 +18.31 1 +18.32 1 +18.35 2 +18.36 1 +18.37 2 +18.38 2 +18.39 1 +18.42 2 +18.43 1 +18.46 1 +18.47 1 +18.49 2 +18.50 1 +18.51 2 +18.52 1 +18.55 1 +18.56 1 +18.57 2 +18.64 1 +18.65 1 +18.66 1 +18.67 1 +18.68 4 +18.69 3 +18.70 2 +18.73 1 +18.74 2 +18.75 1 +18.76 1 +18.77 1 +18.78 1 +18.79 1 +18.80 2 +18.83 1 +18.85 2 +18.86 2 +18.88 3 +18.89 1 +18.90 1 +18.94 1 +18.96 1 +18.98 1 
+19.00 1 +19.01 2 +19.02 1 +19.03 1 +19.04 2 +19.06 1 +19.07 2 +19.08 2 +19.10 2 +19.11 3 +19.13 2 +19.14 4 +19.15 1 +19.16 1 +19.17 2 +19.18 2 +19.19 2 +19.21 1 +19.24 2 +19.26 2 +19.27 1 +19.28 2 +19.30 4 +19.31 1 +19.32 1 +19.33 1 +19.34 1 +19.37 1 +19.42 2 +19.43 1 +19.44 2 +19.45 1 +19.46 2 +19.47 2 +19.51 1 +19.53 2 +19.54 1 +19.55 3 +19.57 1 +19.58 2 +19.60 1 +19.61 3 +19.62 1 +19.63 2 +19.64 3 +19.65 1 +19.68 3 +19.69 1 +19.71 1 +19.72 1 +19.73 1 +19.77 2 +19.78 3 +19.79 1 +19.81 1 +19.82 1 +19.83 1 +19.84 4 +19.85 1 +19.87 3 +19.88 2 +19.89 1 +19.90 1 +19.91 2 +19.93 3 +19.95 3 +19.96 1 +19.97 1 +19.99 1 +2.03 1 +2.04 2 +2.06 2 +2.07 3 +2.08 1 +2.10 2 +2.11 1 +2.14 1 +2.15 3 +2.17 1 +2.19 1 +2.20 1 +2.21 1 +2.22 1 +2.25 1 +2.26 2 +2.29 2 +2.32 1 +2.33 1 +2.35 2 +2.36 2 +2.37 2 +2.41 1 +2.45 1 +2.46 1 +2.48 3 +2.51 3 +2.52 1 +2.54 1 +2.55 1 +2.56 2 +2.57 1 +2.61 2 +2.62 1 +2.63 2 +2.64 1 +2.66 2 +2.68 3 +2.70 2 +2.72 3 +2.75 2 +2.76 1 +2.77 2 +2.80 2 +2.81 1 +2.82 5 +2.83 2 +2.84 2 +2.86 1 +2.87 2 +2.89 1 +2.90 1 +2.92 1 +2.93 1 +2.95 1 +2.96 2 +2.97 2 +2.98 2 +2.99 1 +20.00 1 +20.02 1 +20.06 1 +20.08 1 +20.09 1 +20.11 1 +20.12 1 +20.13 1 +20.14 2 +20.15 1 +20.18 1 +20.21 1 +20.23 2 +20.24 1 +20.29 1 +20.30 1 +20.31 2 +20.32 1 +20.33 1 +20.34 1 +20.35 3 +20.37 1 +20.39 2 +20.40 1 +20.43 2 +20.44 3 +20.46 1 +20.50 1 +20.51 3 +20.52 1 +20.53 1 +20.54 1 +20.56 1 +20.57 1 +20.60 1 +20.61 3 +20.63 1 +20.65 3 +20.67 1 +20.68 2 +20.69 3 +20.72 1 +20.80 1 +20.81 1 +20.84 1 +20.85 2 +20.88 2 +20.90 2 +20.91 1 +20.93 1 +20.94 3 +20.95 3 +20.97 3 +20.98 1 +20.99 1 +21.01 1 +21.02 1 +21.04 1 +21.06 1 +21.08 1 +21.09 1 +21.10 1 +21.11 2 +21.13 1 +21.14 2 +21.15 4 +21.19 2 +21.21 1 +21.23 1 +21.24 1 +21.25 1 +21.26 2 +21.27 1 +21.28 3 +21.29 4 +21.34 1 +21.35 1 +21.36 2 +21.37 1 +21.39 1 +21.40 1 +21.41 2 +21.42 1 +21.43 3 +21.44 2 +21.45 2 +21.46 1 +21.48 1 +21.49 3 +21.50 2 +21.55 4 +21.56 2 +21.59 4 +21.60 3 +21.61 2 +21.62 2 +21.63 1 +21.65 1 +21.66 2 +21.67 1 +21.68 1 +21.69 2 +21.70 3 +21.71 1 +21.72 1 +21.73 1 +21.75 1 +21.76 1 +21.78 1 +21.79 5 +21.80 1 +21.81 4 +21.83 1 +21.84 3 +21.89 2 +21.92 2 +21.93 3 +21.94 2 +21.96 2 +21.97 1 +21.98 1 +21.99 1 +22.00 1 +22.01 1 +22.02 2 +22.03 3 +22.04 1 +22.05 1 +22.07 2 +22.08 1 +22.11 1 +22.13 2 +22.14 2 +22.15 2 +22.16 3 +22.18 1 +22.19 1 +22.22 1 +22.23 1 +22.24 1 +22.25 1 +22.30 1 +22.32 1 +22.34 3 +22.35 1 +22.38 1 +22.40 1 +22.44 3 +22.45 1 +22.47 1 +22.48 1 +22.49 1 +22.53 1 +22.59 3 +22.60 2 +22.62 2 +22.64 1 +22.66 4 +22.67 1 +22.68 3 +22.69 1 +22.70 1 +22.71 2 +22.72 3 +22.73 3 +22.76 2 +22.77 2 +22.80 1 +22.81 1 +22.85 1 +22.86 1 +22.88 1 +22.93 1 +22.94 2 +22.95 1 +22.99 1 +23.01 2 +23.03 1 +23.05 1 +23.06 1 +23.07 3 +23.08 1 +23.09 3 +23.11 1 +23.14 2 +23.17 1 +23.18 1 +23.19 2 +23.20 2 +23.21 1 +23.22 1 +23.23 1 +23.24 1 +23.28 1 +23.32 1 +23.33 1 +23.34 2 +23.35 2 +23.37 1 +23.38 1 +23.40 1 +23.41 1 +23.42 2 +23.43 1 +23.44 2 +23.45 2 +23.46 1 +23.47 1 +23.48 1 +23.51 1 +23.53 4 +23.55 4 +23.57 2 +23.58 1 +23.59 1 +23.60 2 +23.61 2 +23.63 3 +23.65 1 +23.67 3 +23.68 1 +23.72 1 +23.73 1 +23.74 1 +23.76 2 +23.78 2 +23.79 2 +23.82 2 +23.84 3 +23.85 2 +23.89 1 +23.90 1 +23.91 3 +23.93 2 +23.94 2 +23.95 2 +23.99 2 +24.02 1 +24.03 1 +24.04 4 +24.05 1 +24.06 1 +24.07 1 +24.08 1 +24.10 1 +24.11 1 +24.14 2 +24.15 1 +24.16 1 +24.19 2 +24.20 2 +24.21 1 +24.23 1 +24.24 1 +24.25 1 +24.26 2 +24.30 2 +24.32 1 +24.33 1 +24.34 1 +24.36 3 +24.37 1 +24.38 1 +24.39 1 +24.40 2 +24.44 1 +24.45 1 +24.47 1 +24.48 4 +24.49 2 +24.53 1 +24.54 1 +24.55 1 
+24.56 1 +24.60 1 +24.61 1 +24.62 2 +24.63 1 +24.64 1 +24.65 2 +24.68 1 +24.69 2 +24.70 2 +24.71 1 +24.73 1 +24.76 1 +24.78 1 +24.79 1 +24.80 1 +24.82 1 +24.83 1 +24.84 3 +24.86 1 +24.87 5 +24.89 1 +24.90 2 +24.92 2 +24.94 2 +24.96 1 +24.97 2 +24.98 2 +24.99 1 +25.00 2 +25.04 1 +25.06 1 +25.08 1 +25.10 2 +25.12 1 +25.13 2 +25.14 1 +25.15 1 +25.17 2 +25.18 3 +25.19 2 +25.20 1 +25.21 2 +25.22 1 +25.23 1 +25.24 1 +25.25 2 +25.27 1 +25.28 1 +25.29 2 +25.31 4 +25.35 1 +25.36 2 +25.37 1 +25.38 2 +25.39 1 +25.40 1 +25.41 4 +25.42 1 +25.43 1 +25.47 1 +25.50 1 +25.51 2 +25.52 2 +25.53 1 +25.56 2 +25.57 2 +25.58 3 +25.62 2 +25.63 1 +25.64 1 +25.65 1 +25.66 1 +25.67 2 +25.68 1 +25.69 1 +25.70 1 +25.71 5 +25.72 1 +25.73 1 +25.74 1 +25.76 1 +25.77 2 +25.80 1 +25.83 2 +25.84 2 +25.86 1 +25.88 2 +25.89 2 +25.90 1 +25.91 1 +25.92 1 +25.94 2 +25.95 1 +25.97 1 +25.98 1 +25.99 1 +26.00 1 +26.01 3 +26.02 2 +26.03 1 +26.04 1 +26.05 1 +26.06 2 +26.09 1 +26.10 1 +26.11 2 +26.12 1 +26.13 1 +26.16 1 +26.17 1 +26.19 1 +26.20 2 +26.21 2 +26.22 2 +26.24 1 +26.27 1 +26.31 1 +26.32 3 +26.33 1 +26.34 1 +26.35 1 +26.36 1 +26.37 1 +26.40 2 +26.41 1 +26.42 1 +26.43 1 +26.44 1 +26.46 2 +26.54 1 +26.56 1 +26.57 2 +26.60 1 +26.62 2 +26.64 1 +26.65 1 +26.66 1 +26.67 1 +26.68 1 +26.70 1 +26.73 1 +26.74 2 +26.77 1 +26.78 2 +26.79 1 +26.80 3 +26.81 1 +26.83 1 +26.85 2 +26.86 1 +26.88 1 +26.89 1 +26.91 1 +26.93 1 +26.94 2 +26.95 1 +26.96 2 +26.97 1 +26.98 1 +26.99 2 +27.00 1 +27.01 3 +27.02 1 +27.03 1 +27.04 4 +27.05 1 +27.06 1 +27.07 3 +27.12 4 +27.13 2 +27.14 1 +27.18 1 +27.25 1 +27.26 1 +27.27 1 +27.32 1 +27.33 2 +27.34 2 +27.35 3 +27.36 1 +27.38 1 +27.39 2 +27.43 1 +27.44 1 +27.45 1 +27.47 1 +27.48 1 +27.49 1 +27.51 1 +27.52 2 +27.53 3 +27.55 1 +27.56 1 +27.57 4 +27.58 3 +27.59 2 +27.60 1 +27.62 2 +27.63 3 +27.64 1 +27.67 1 +27.68 2 +27.71 1 +27.72 2 +27.75 1 +27.76 3 +27.79 1 +27.80 3 +27.81 1 +27.82 1 +27.83 1 +27.84 1 +27.86 1 +27.87 1 +27.88 1 +27.89 3 +27.90 1 +27.92 1 +27.95 1 +27.96 2 +27.99 1 +28.00 1 +28.02 1 +28.03 2 +28.04 2 +28.05 2 +28.07 3 +28.08 2 +28.09 3 +28.10 1 +28.11 2 +28.14 2 +28.15 2 +28.17 2 +28.18 1 +28.20 2 +28.22 2 +28.23 1 +28.25 2 +28.28 2 +28.29 1 +28.30 1 +28.31 4 +28.33 3 +28.34 1 +28.35 2 +28.37 1 +28.40 2 +28.41 2 +28.42 1 +28.43 1 +28.44 1 +28.47 2 +28.48 1 +28.49 2 +28.51 1 +28.53 1 +28.54 2 +28.55 2 +28.60 1 +28.61 1 +28.62 2 +28.63 2 +28.64 1 +28.65 1 +28.67 1 +28.68 1 +28.69 2 +28.71 2 +28.72 1 +28.74 2 +28.76 2 +28.78 1 +28.80 1 +28.81 1 +28.84 1 +28.85 1 +28.87 1 +28.90 2 +28.91 2 +28.92 3 +28.93 3 +28.94 1 +28.95 1 +28.96 1 +28.98 1 +29.00 1 +29.01 2 +29.02 1 +29.03 1 +29.04 1 +29.06 2 +29.07 1 +29.09 1 +29.14 1 +29.15 1 +29.19 2 +29.21 1 +29.23 1 +29.26 1 +29.27 3 +29.28 1 +29.30 1 +29.31 1 +29.32 2 +29.34 1 +29.36 2 +29.37 1 +29.38 1 +29.39 1 +29.40 1 +29.41 1 +29.42 1 +29.43 1 +29.48 2 +29.49 4 +29.53 1 +29.54 3 +29.55 1 +29.57 3 +29.58 1 +29.59 3 +29.60 1 +29.61 1 +29.62 2 +29.63 1 +29.64 2 +29.65 2 +29.66 2 +29.67 1 +29.68 3 +29.70 1 +29.72 1 +29.73 1 +29.74 2 +29.75 3 +29.79 1 +29.83 1 +29.84 1 +29.85 1 +29.86 1 +29.87 1 +29.88 1 +29.89 1 +29.90 1 +29.92 2 +29.93 1 +29.95 1 +29.96 1 +29.97 1 +29.99 3 +3.00 2 +3.01 2 +3.04 3 +3.07 3 +3.08 2 +3.09 1 +3.11 2 +3.12 1 +3.13 3 +3.14 1 +3.16 2 +3.18 2 +3.22 2 +3.25 1 +3.27 2 +3.29 2 +3.31 1 +3.34 1 +3.36 3 +3.37 2 +3.39 2 +3.40 1 +3.41 1 +3.43 2 +3.44 1 +3.45 2 +3.46 3 +3.47 2 +3.48 1 +3.49 1 +3.51 4 +3.52 2 +3.53 1 +3.54 2 +3.55 1 +3.58 3 +3.59 2 +3.60 3 +3.61 2 +3.64 1 +3.65 2 +3.68 2 +3.69 2 +3.73 2 +3.74 1 +3.75 2 +3.78 1 +3.79 1 
+3.81 3 +3.83 1 +3.84 3 +3.85 1 +3.86 1 +3.87 4 +3.88 3 +3.90 1 +3.91 2 +3.93 3 +3.94 1 +3.95 2 +3.98 4 +30.00 2 +30.01 5 +30.02 2 +30.05 1 +30.06 2 +30.07 1 +30.08 2 +30.10 1 +30.11 2 +30.15 1 +30.16 1 +30.17 1 +30.18 1 +30.20 2 +30.21 1 +30.23 3 +30.24 1 +30.25 3 +30.26 1 +30.27 3 +30.30 1 +30.32 1 +30.33 2 +30.34 1 +30.35 2 +30.37 3 +30.38 1 +30.40 1 +30.41 1 +30.42 2 +30.43 2 +30.45 2 +30.47 1 +30.49 3 +30.54 1 +30.55 2 +30.57 3 +30.58 2 +30.59 3 +30.60 3 +30.65 2 +30.66 1 +30.67 3 +30.68 2 +30.70 2 +30.71 1 +30.72 1 +30.74 1 +30.76 1 +30.77 3 +30.78 1 +30.79 1 +30.80 3 +30.81 3 +30.82 1 +30.83 1 +30.84 2 +30.85 1 +30.86 1 +30.89 1 +30.91 1 +30.93 1 +30.94 3 +30.96 3 +30.97 1 +30.98 1 +30.99 1 +31.01 1 +31.02 2 +31.03 3 +31.04 1 +31.06 1 +31.07 2 +31.09 1 +31.10 2 +31.11 1 +31.12 1 +31.13 1 +31.16 1 +31.17 2 +31.18 1 +31.19 1 +31.20 1 +31.22 3 +31.23 1 +31.24 2 +31.25 1 +31.27 4 +31.28 1 +31.29 4 +31.30 1 +31.31 1 +31.32 1 +31.34 2 +31.35 3 +31.36 1 +31.37 1 +31.41 2 +31.42 3 +31.43 2 +31.44 2 +31.46 1 +31.48 2 +31.49 3 +31.50 2 +31.54 1 +31.56 3 +31.58 2 +31.59 1 +31.60 3 +31.63 1 +31.66 1 +31.68 2 +31.70 4 +31.71 2 +31.72 1 +31.73 1 +31.74 1 +31.75 2 +31.76 2 +31.78 1 +31.79 2 +31.83 3 +31.85 1 +31.87 2 +31.91 1 +31.92 1 +31.93 2 +31.94 1 +31.96 2 +31.98 1 +31.99 1 +32.01 2 +32.02 2 +32.03 2 +32.05 1 +32.06 2 +32.07 1 +32.08 2 +32.09 2 +32.10 3 +32.11 1 +32.12 2 +32.13 1 +32.15 3 +32.16 1 +32.17 1 +32.18 1 +32.20 1 +32.22 1 +32.25 1 +32.27 1 +32.28 1 +32.30 1 +32.31 1 +32.32 1 +32.33 1 +32.36 1 +32.37 1 +32.40 1 +32.41 3 +32.43 1 +32.44 1 +32.45 3 +32.47 1 +32.48 1 +32.52 1 +32.55 2 +32.56 2 +32.57 1 +32.60 1 +32.61 1 +32.63 1 +32.64 1 +32.65 2 +32.66 1 +32.68 2 +32.72 1 +32.73 2 +32.78 3 +32.79 1 +32.80 3 +32.83 1 +32.84 1 +32.85 2 +32.86 1 +32.88 1 +32.89 1 +32.92 1 +32.94 1 +32.95 2 +32.96 3 +32.97 2 +32.99 1 +33.00 3 +33.01 1 +33.03 5 +33.04 3 +33.05 1 +33.06 1 +33.07 3 +33.11 2 +33.12 2 +33.13 1 +33.14 1 +33.15 1 +33.17 1 +33.18 2 +33.20 2 +33.21 1 +33.22 3 +33.24 1 +33.25 1 +33.27 3 +33.29 3 +33.30 1 +33.31 1 +33.33 1 +33.35 2 +33.36 2 +33.37 3 +33.38 1 +33.39 1 +33.40 1 +33.41 2 +33.42 1 +33.45 2 +33.47 2 +33.49 1 +33.50 2 +33.51 2 +33.53 1 +33.55 1 +33.58 1 +33.59 1 +33.63 2 +33.64 1 +33.65 1 +33.66 2 +33.67 1 +33.68 1 +33.69 3 +33.70 1 +33.71 1 +33.72 3 +33.73 2 +33.75 1 +33.76 3 +33.77 4 +33.82 1 +33.83 1 +33.84 1 +33.87 1 +33.88 2 +33.89 2 +33.90 2 +33.92 2 +33.95 2 +33.99 1 +34.01 2 +34.02 1 +34.04 1 +34.07 1 +34.08 1 +34.10 1 +34.11 1 +34.12 1 +34.13 1 +34.14 1 +34.20 1 +34.22 4 +34.23 2 +34.27 2 +34.29 1 +34.30 1 +34.31 1 +34.32 1 +34.34 1 +34.37 1 +34.38 1 +34.40 2 +34.42 3 +34.44 1 +34.45 2 +34.47 1 +34.49 1 +34.50 1 +34.54 2 +34.55 2 +34.57 1 +34.58 2 +34.59 2 +34.61 2 +34.63 2 +34.64 1 +34.65 2 +34.66 1 +34.67 1 +34.70 2 +34.71 2 +34.72 2 +34.75 1 +34.77 1 +34.78 2 +34.79 1 +34.80 3 +34.82 2 +34.83 1 +34.84 2 +34.85 2 +34.87 4 +34.88 3 +34.90 2 +34.91 1 +34.94 1 +34.95 1 +34.96 1 +34.98 1 +35.01 1 +35.03 1 +35.05 1 +35.06 1 +35.07 1 +35.08 1 +35.09 1 +35.10 2 +35.11 1 +35.13 2 +35.14 4 +35.15 2 +35.16 1 +35.17 1 +35.19 1 +35.20 1 +35.21 3 +35.23 1 +35.25 1 +35.26 1 +35.27 1 +35.28 1 +35.29 1 +35.32 2 +35.33 1 +35.36 1 +35.37 1 +35.38 2 +35.40 3 +35.42 1 +35.43 1 +35.45 3 +35.46 2 +35.48 2 +35.51 1 +35.55 1 +35.56 1 +35.58 1 +35.59 1 +35.63 1 +35.65 1 +35.66 1 +35.68 2 +35.70 1 +35.73 2 +35.75 2 +35.76 1 +35.77 1 +35.78 2 +35.79 1 +35.80 2 +35.82 1 +35.83 2 +35.84 1 +35.85 1 +35.86 2 +35.89 1 +35.90 2 +35.93 1 +35.94 2 +35.95 1 +35.96 1 +35.97 2 +35.98 1 +36.04 2 
+36.05 1 +36.06 1 +36.10 1 +36.11 3 +36.13 1 +36.17 1 +36.18 2 +36.21 1 +36.22 2 +36.23 1 +36.24 1 +36.25 2 +36.28 1 +36.30 1 +36.31 1 +36.32 2 +36.33 4 +36.34 2 +36.36 2 +36.37 1 +36.38 5 +36.41 4 +36.42 1 +36.43 3 +36.45 1 +36.48 1 +36.49 1 +36.50 2 +36.51 2 +36.52 1 +36.53 1 +36.54 2 +36.56 1 +36.57 1 +36.58 2 +36.59 1 +36.60 1 +36.61 2 +36.63 1 +36.65 1 +36.66 2 +36.68 1 +36.69 1 +36.73 2 +36.74 2 +36.75 3 +36.76 2 +36.77 6 +36.80 1 +36.81 1 +36.82 1 +36.84 1 +36.85 2 +36.86 1 +36.87 1 +36.88 1 +36.89 1 +36.90 1 +36.91 1 +36.92 2 +36.93 2 +36.95 3 +36.96 3 +36.97 1 +36.98 1 +36.99 1 +37.00 1 +37.01 1 +37.02 2 +37.03 1 +37.05 2 +37.06 3 +37.07 3 +37.10 1 +37.11 1 +37.12 2 +37.15 2 +37.16 1 +37.17 1 +37.18 1 +37.22 2 +37.24 1 +37.25 2 +37.27 1 +37.28 1 +37.30 1 +37.31 1 +37.32 1 +37.34 4 +37.35 2 +37.36 1 +37.38 1 +37.39 2 +37.41 2 +37.42 1 +37.43 2 +37.44 1 +37.45 2 +37.46 1 +37.48 3 +37.53 1 +37.55 1 +37.56 1 +37.60 5 +37.61 1 +37.63 2 +37.64 1 +37.65 1 +37.66 1 +37.67 1 +37.69 1 +37.70 1 +37.71 2 +37.73 1 +37.74 3 +37.76 1 +37.78 2 +37.79 1 +37.80 2 +37.82 2 +37.84 3 +37.85 1 +37.86 3 +37.88 1 +37.91 1 +37.93 1 +37.94 1 +37.95 3 +37.97 2 +37.98 1 +37.99 1 +38.00 1 +38.01 2 +38.02 1 +38.03 2 +38.04 2 +38.05 1 +38.08 3 +38.09 1 +38.11 1 +38.13 1 +38.14 2 +38.15 1 +38.16 1 +38.17 1 +38.19 1 +38.20 1 +38.22 2 +38.23 3 +38.24 1 +38.25 2 +38.26 1 +38.27 2 +38.29 1 +38.30 1 +38.31 1 +38.33 1 +38.34 2 +38.35 1 +38.36 1 +38.37 1 +38.39 1 +38.41 1 +38.42 1 +38.43 1 +38.44 1 +38.45 1 +38.46 1 +38.47 1 +38.48 1 +38.49 1 +38.50 4 +38.54 3 +38.55 2 +38.56 1 +38.58 1 +38.60 3 +38.61 3 +38.64 1 +38.65 1 +38.66 1 +38.67 2 +38.70 3 +38.71 2 +38.73 2 +38.74 1 +38.75 2 +38.76 1 +38.77 1 +38.81 1 +38.82 2 +38.83 1 +38.84 2 +38.88 3 +38.91 2 +38.93 2 +38.98 4 +39.04 2 +39.05 2 +39.06 1 +39.07 1 +39.09 1 +39.12 5 +39.14 1 +39.15 1 +39.16 2 +39.18 1 +39.19 1 +39.20 2 +39.21 1 +39.22 1 +39.25 1 +39.27 2 +39.31 1 +39.32 2 +39.36 1 +39.37 3 +39.38 1 +39.40 2 +39.42 4 +39.43 1 +39.47 2 +39.48 2 +39.51 1 +39.52 2 +39.55 1 +39.56 2 +39.60 2 +39.61 4 +39.62 1 +39.63 1 +39.67 1 +39.69 2 +39.70 2 +39.71 1 +39.74 2 +39.75 1 +39.76 1 +39.77 2 +39.80 1 +39.81 2 +39.82 2 +39.84 1 +39.85 1 +39.86 2 +39.88 1 +39.89 2 +39.90 3 +39.91 1 +39.92 1 +39.94 2 +39.95 1 +39.96 1 +39.97 1 +39.98 2 +4.03 1 +4.05 1 +4.07 1 +4.08 1 +4.09 1 +4.11 1 +4.14 2 +4.16 2 +4.17 1 +4.18 1 +4.19 1 +4.21 2 +4.22 1 +4.23 1 +4.24 1 +4.25 1 +4.26 1 +4.28 2 +4.29 1 +4.30 2 +4.31 2 +4.32 1 +4.33 2 +4.34 1 +4.35 2 +4.36 2 +4.39 1 +4.40 1 +4.41 1 +4.43 1 +4.44 2 +4.45 1 +4.47 2 +4.49 2 +4.50 1 +4.51 1 +4.52 1 +4.54 1 +4.55 3 +4.56 1 +4.57 1 +4.58 2 +4.59 1 +4.60 5 +4.62 3 +4.64 2 +4.67 1 +4.68 1 +4.70 3 +4.73 1 +4.74 1 +4.77 1 +4.78 2 +4.79 1 +4.82 1 +4.85 1 +4.87 1 +4.88 2 +4.89 3 +4.90 1 +4.91 1 +4.93 1 +4.94 1 +4.95 1 +4.97 2 +40.04 1 +40.06 2 +40.07 2 +40.10 1 +40.11 2 +40.12 3 +40.14 1 +40.18 2 +40.21 1 +40.22 1 +40.28 1 +40.29 1 +40.30 1 +40.33 2 +40.35 1 +40.39 4 +40.40 1 +40.41 1 +40.42 1 +40.43 1 +40.48 1 +40.50 3 +40.51 1 +40.52 1 +40.54 2 +40.56 1 +40.57 2 +40.58 3 +40.59 1 +40.60 2 +40.62 2 +40.63 3 +40.64 1 +40.65 2 +40.66 1 +40.67 1 +40.68 3 +40.70 1 +40.73 1 +40.74 1 +40.76 1 +40.79 1 +40.81 2 +40.82 1 +40.84 1 +40.87 1 +40.88 1 +40.90 1 +40.91 1 +40.92 1 +40.93 1 +40.94 1 +40.96 1 +40.97 1 +40.99 2 +41.00 1 +41.01 2 +41.02 2 +41.04 1 +41.05 2 +41.06 1 +41.08 2 +41.10 1 +41.11 2 +41.13 3 +41.14 1 +41.16 2 +41.19 2 +41.21 1 +41.26 1 +41.27 2 +41.28 1 +41.30 2 +41.32 1 +41.33 1 +41.35 1 +41.37 2 +41.38 1 +41.39 2 +41.40 1 +41.41 1 +41.43 1 
+41.44 2 +41.45 1 +41.46 1 +41.47 1 +41.51 2 +41.52 2 +41.53 1 +41.54 2 +41.55 2 +41.56 2 +41.57 2 +41.58 2 +41.59 1 +41.60 1 +41.61 3 +41.62 1 +41.63 1 +41.67 2 +41.68 3 +41.69 2 +41.70 2 +41.71 2 +41.72 1 +41.75 1 +41.76 1 +41.78 1 +41.80 1 +41.81 2 +41.83 1 +41.86 1 +41.87 3 +41.88 2 +41.89 1 +41.90 2 +41.91 1 +41.92 1 +41.93 1 +41.96 1 +41.97 1 +42.00 1 +42.01 2 +42.02 3 +42.04 1 +42.06 1 +42.09 4 +42.11 1 +42.13 2 +42.14 1 +42.17 1 +42.19 1 +42.20 1 +42.22 2 +42.25 1 +42.26 2 +42.28 2 +42.29 1 +42.30 2 +42.33 1 +42.34 1 +42.35 1 +42.36 2 +42.38 1 +42.39 2 +42.40 2 +42.42 1 +42.43 1 +42.44 1 +42.47 1 +42.48 2 +42.52 2 +42.53 1 +42.54 2 +42.57 1 +42.58 2 +42.60 2 +42.62 2 +42.64 1 +42.66 2 +42.67 1 +42.68 3 +42.70 1 +42.71 1 +42.74 5 +42.76 2 +42.78 2 +42.79 1 +42.81 1 +42.82 1 +42.84 1 +42.85 5 +42.86 1 +42.87 1 +42.88 3 +42.89 1 +42.90 1 +42.91 1 +42.93 4 +42.94 2 +42.98 1 +42.99 1 +43.00 1 +43.01 1 +43.03 1 +43.05 1 +43.08 1 +43.09 1 +43.10 1 +43.11 2 +43.13 2 +43.16 1 +43.18 3 +43.25 2 +43.26 5 +43.29 1 +43.31 2 +43.32 1 +43.33 2 +43.37 3 +43.39 1 +43.41 2 +43.42 1 +43.43 1 +43.44 1 +43.45 1 +43.47 1 +43.48 1 +43.49 1 +43.51 1 +43.53 1 +43.55 3 +43.56 2 +43.58 1 +43.59 1 +43.60 1 +43.61 1 +43.62 1 +43.65 2 +43.66 3 +43.67 1 +43.71 1 +43.72 2 +43.73 1 +43.74 1 +43.75 1 +43.77 1 +43.78 2 +43.79 1 +43.80 4 +43.81 3 +43.82 1 +43.84 2 +43.85 1 +43.87 1 +43.88 1 +43.89 1 +43.92 2 +43.93 1 +43.94 1 +43.95 1 +44.00 1 +44.02 1 +44.04 1 +44.09 2 +44.10 2 +44.17 1 +44.18 1 +44.19 1 +44.20 2 +44.21 4 +44.22 1 +44.24 1 +44.25 1 +44.26 2 +44.30 2 +44.31 1 +44.32 4 +44.33 2 +44.35 1 +44.36 2 +44.37 1 +44.38 1 +44.39 1 +44.41 2 +44.44 1 +44.47 2 +44.49 1 +44.50 1 +44.51 2 +44.52 1 +44.53 2 +44.56 1 +44.57 1 +44.58 3 +44.59 2 +44.60 2 +44.61 2 +44.65 2 +44.66 2 +44.67 2 +44.68 1 +44.69 1 +44.72 1 +44.74 1 +44.75 3 +44.76 1 +44.77 1 +44.78 1 +44.80 1 +44.81 2 +44.83 1 +44.84 1 +44.85 2 +44.88 2 +44.90 1 +44.91 2 +44.92 1 +44.93 1 +44.94 4 +44.95 1 +44.96 5 +44.98 1 +45.00 1 +45.02 1 +45.03 1 +45.05 1 +45.06 1 +45.08 2 +45.12 1 +45.13 1 +45.15 1 +45.19 1 +45.20 1 +45.21 2 +45.23 1 +45.24 1 +45.25 2 +45.29 4 +45.30 1 +45.31 1 +45.32 1 +45.33 2 +45.34 1 +45.35 1 +45.36 3 +45.38 3 +45.40 2 +45.41 1 +45.43 1 +45.45 3 +45.46 1 +45.47 1 +45.48 1 +45.49 1 +45.54 1 +45.58 2 +45.60 1 +45.61 1 +45.63 2 +45.64 2 +45.65 1 +45.67 2 +45.69 4 +45.70 2 +45.74 1 +45.77 2 +45.78 1 +45.81 2 +45.82 1 +45.83 2 +45.84 1 +45.86 4 +45.89 3 +45.90 1 +45.92 1 +45.94 1 +45.96 2 +45.97 1 +45.98 3 +45.99 1 +46.00 1 +46.01 1 +46.04 1 +46.05 1 +46.06 1 +46.09 3 +46.11 1 +46.12 1 +46.13 1 +46.14 3 +46.15 1 +46.19 2 +46.20 1 +46.21 1 +46.23 2 +46.24 1 +46.25 1 +46.26 2 +46.29 2 +46.30 1 +46.32 2 +46.33 1 +46.34 2 +46.35 1 +46.36 3 +46.37 1 +46.39 2 +46.40 1 +46.41 1 +46.42 2 +46.43 1 +46.45 1 +46.47 2 +46.48 2 +46.50 1 +46.53 1 +46.54 1 +46.55 1 +46.57 1 +46.58 1 +46.60 1 +46.61 1 +46.62 1 +46.66 1 +46.70 1 +46.71 1 +46.72 2 +46.73 1 +46.74 1 +46.75 1 +46.77 1 +46.78 2 +46.81 1 +46.84 2 +46.86 1 +46.87 2 +46.90 1 +46.91 1 +46.92 2 +46.93 4 +46.94 1 +46.95 1 +46.96 1 +46.97 1 +46.98 2 +46.99 3 +47.02 1 +47.03 2 +47.06 1 +47.07 1 +47.08 1 +47.09 3 +47.11 1 +47.12 3 +47.13 1 +47.16 3 +47.17 1 +47.19 1 +47.23 3 +47.25 3 +47.26 2 +47.29 1 +47.30 1 +47.31 1 +47.32 1 +47.34 1 +47.35 3 +47.37 1 +47.38 1 +47.41 2 +47.42 1 +47.45 5 +47.46 2 +47.48 1 +47.49 2 +47.51 2 +47.52 1 +47.53 1 +47.54 3 +47.55 1 +47.56 1 +47.57 2 +47.60 1 +47.61 2 +47.62 1 +47.63 1 +47.64 1 +47.65 1 +47.66 1 +47.68 1 +47.69 1 +47.70 1 +47.71 1 +47.72 1 +47.75 1 +47.77 3 
+47.79 1 +47.81 2 +47.82 1 +47.83 1 +47.85 1 +47.86 1 +47.88 1 +47.89 1 +47.91 5 +47.94 1 +47.96 1 +47.97 3 +47.99 2 +48.02 5 +48.05 1 +48.06 1 +48.07 2 +48.08 2 +48.11 1 +48.12 1 +48.13 3 +48.14 1 +48.15 1 +48.16 1 +48.17 3 +48.18 1 +48.19 1 +48.20 4 +48.21 3 +48.22 1 +48.24 1 +48.25 1 +48.26 1 +48.27 2 +48.28 1 +48.29 2 +48.31 1 +48.32 1 +48.33 1 +48.34 2 +48.36 1 +48.37 2 +48.38 1 +48.41 1 +48.42 1 +48.43 2 +48.48 3 +48.49 2 +48.50 3 +48.51 3 +48.54 3 +48.55 1 +48.59 1 +48.60 1 +48.61 2 +48.63 1 +48.64 1 +48.68 3 +48.69 2 +48.70 1 +48.71 2 +48.73 2 +48.75 1 +48.76 2 +48.77 3 +48.78 1 +48.79 2 +48.80 3 +48.81 3 +48.83 1 +48.84 4 +48.85 1 +48.86 1 +48.88 2 +48.90 3 +48.91 1 +48.92 1 +48.93 1 +48.94 2 +48.95 1 +48.96 2 +48.97 1 +48.98 1 +49.00 1 +49.01 2 +49.02 2 +49.03 1 +49.05 1 +49.06 1 +49.07 1 +49.10 2 +49.12 3 +49.13 1 +49.14 1 +49.15 1 +49.17 1 +49.18 2 +49.19 3 +49.21 1 +49.23 1 +49.24 1 +49.26 3 +49.28 1 +49.30 1 +49.31 2 +49.34 1 +49.35 1 +49.38 2 +49.39 1 +49.40 1 +49.43 2 +49.44 1 +49.46 1 +49.49 2 +49.50 1 +49.51 1 +49.54 3 +49.55 1 +49.57 1 +49.60 3 +49.62 2 +49.65 1 +49.67 3 +49.69 2 +49.70 3 +49.71 2 +49.72 2 +49.73 4 +49.75 2 +49.78 1 +49.79 1 +49.80 2 +49.81 2 +49.82 2 +49.83 2 +49.84 2 +49.85 1 +49.87 1 +49.88 4 +49.89 2 +49.90 1 +49.91 3 +49.92 2 +49.93 2 +49.95 1 +49.97 1 +49.99 2 +5.00 2 +5.01 1 +5.02 1 +5.03 1 +5.04 1 +5.05 4 +5.06 1 +5.07 2 +5.09 2 +5.10 3 +5.12 1 +5.13 1 +5.14 2 +5.15 1 +5.16 1 +5.18 1 +5.19 1 +5.20 1 +5.21 2 +5.24 3 +5.26 1 +5.28 1 +5.31 2 +5.33 1 +5.35 1 +5.37 2 +5.39 1 +5.42 2 +5.43 1 +5.45 1 +5.46 4 +5.47 1 +5.49 3 +5.50 3 +5.51 1 +5.52 1 +5.53 1 +5.58 1 +5.63 1 +5.64 2 +5.65 2 +5.68 2 +5.69 1 +5.71 1 +5.72 2 +5.73 1 +5.75 1 +5.76 2 +5.81 1 +5.82 1 +5.83 3 +5.84 1 +5.86 2 +5.88 2 +5.89 1 +5.90 1 +5.93 3 +5.96 1 +5.97 1 +5.98 1 +5.99 1 +50.00 2 +50.01 2 +50.02 1 +50.03 2 +50.04 2 +50.06 1 +50.08 1 +50.09 1 +50.10 1 +50.13 2 +50.14 1 +50.15 1 +50.17 2 +50.21 1 +50.22 3 +50.25 2 +50.26 4 +50.29 1 +50.30 2 +50.31 2 +50.32 1 +50.33 1 +50.34 1 +50.38 1 +50.39 1 +50.40 1 +50.42 1 +50.45 2 +50.50 2 +50.51 1 +50.53 1 +50.54 1 +50.55 1 +50.56 1 +50.57 3 +50.59 2 +50.60 5 +50.61 3 +50.62 1 +50.63 1 +50.66 1 +50.67 1 +50.71 1 +50.72 1 +50.73 3 +50.75 1 +50.76 1 +50.78 2 +50.79 2 +50.80 2 +50.83 1 +50.84 2 +50.85 1 +50.86 2 +50.90 1 +50.96 1 +50.97 2 +50.98 1 +50.99 1 +51.01 1 +51.02 1 +51.04 2 +51.08 4 +51.09 2 +51.10 1 +51.12 1 +51.13 3 +51.14 3 +51.15 1 +51.18 1 +51.19 1 +51.21 1 +51.29 3 +51.32 1 +51.33 2 +51.36 2 +51.39 1 +51.40 2 +51.43 3 +51.45 1 +51.48 2 +51.52 1 +51.53 1 +51.54 3 +51.55 2 +51.56 2 +51.58 3 +51.59 1 +51.62 1 +51.64 1 +51.66 1 +51.68 4 +51.69 1 +51.70 1 +51.71 1 +51.74 2 +51.76 2 +51.78 1 +51.79 1 +51.82 1 +51.83 1 +51.84 2 +51.85 1 +51.86 1 +51.88 2 +51.89 2 +51.90 3 +51.91 3 +51.94 1 +51.95 2 +52.01 1 +52.02 2 +52.05 2 +52.08 3 +52.10 1 +52.12 1 +52.13 1 +52.17 1 +52.19 1 +52.20 1 +52.23 2 +52.24 2 +52.28 2 +52.29 1 +52.32 1 +52.33 1 +52.36 1 +52.38 2 +52.41 3 +52.42 1 +52.43 1 +52.45 1 +52.48 1 +52.49 1 +52.50 1 +52.51 2 +52.52 2 +52.53 1 +52.55 1 +52.56 2 +52.58 1 +52.59 2 +52.60 2 +52.61 1 +52.62 1 +52.63 1 +52.64 1 +52.65 2 +52.66 1 +52.67 1 +52.68 1 +52.69 2 +52.70 3 +52.72 1 +52.74 1 +52.75 1 +52.77 1 +52.78 2 +52.79 1 +52.80 3 +52.81 2 +52.83 2 +52.84 2 +52.85 1 +52.88 1 +52.89 2 +52.90 2 +52.92 1 +52.94 1 +52.95 2 +52.96 1 +52.97 1 +52.98 1 +53.02 1 +53.04 2 +53.06 1 +53.07 2 +53.08 2 +53.09 1 +53.10 2 +53.13 1 +53.14 3 +53.16 2 +53.21 1 +53.22 1 +53.24 1 +53.25 1 +53.27 2 +53.28 2 +53.29 1 +53.30 1 +53.31 3 +53.34 1 +53.35 
2 +53.36 1 +53.37 3 +53.38 2 +53.39 2 +53.40 1 +53.41 3 +53.46 1 +53.47 1 +53.48 2 +53.49 1 +53.50 2 +53.51 1 +53.52 2 +53.53 3 +53.55 1 +53.56 2 +53.57 1 +53.58 1 +53.59 1 +53.60 1 +53.61 4 +53.63 1 +53.64 2 +53.65 4 +53.68 1 +53.69 1 +53.72 2 +53.73 1 +53.74 1 +53.75 1 +53.77 1 +53.79 1 +53.80 1 +53.82 1 +53.83 1 +53.84 2 +53.85 2 +53.86 1 +53.89 1 +53.91 2 +53.92 3 +53.93 1 +53.94 4 +53.96 1 +53.97 1 +53.98 2 +54.00 1 +54.01 1 +54.02 3 +54.03 3 +54.04 4 +54.05 2 +54.07 1 +54.09 1 +54.10 2 +54.11 1 +54.15 1 +54.16 2 +54.18 1 +54.19 1 +54.20 2 +54.21 1 +54.23 1 +54.25 1 +54.26 2 +54.27 1 +54.28 1 +54.29 2 +54.30 1 +54.31 2 +54.32 2 +54.33 1 +54.34 1 +54.35 2 +54.37 1 +54.39 1 +54.41 1 +54.42 1 +54.43 2 +54.45 2 +54.46 1 +54.47 2 +54.48 1 +54.49 1 +54.50 1 +54.51 1 +54.53 2 +54.54 1 +54.55 1 +54.56 1 +54.57 1 +54.58 1 +54.61 1 +54.62 2 +54.64 3 +54.65 1 +54.66 3 +54.67 2 +54.68 2 +54.69 2 +54.70 1 +54.72 3 +54.73 1 +54.74 1 +54.75 5 +54.76 3 +54.79 1 +54.80 2 +54.82 2 +54.85 1 +54.86 1 +54.88 1 +54.89 1 +54.90 1 +54.92 1 +54.93 1 +54.94 1 +54.96 1 +54.98 3 +55.00 2 +55.02 1 +55.03 1 +55.04 3 +55.05 1 +55.06 1 +55.07 1 +55.09 1 +55.10 1 +55.12 3 +55.13 1 +55.14 1 +55.16 1 +55.18 1 +55.19 1 +55.22 1 +55.23 2 +55.24 1 +55.25 1 +55.26 1 +55.30 2 +55.31 1 +55.33 1 +55.36 1 +55.37 2 +55.38 2 +55.39 3 +55.40 2 +55.41 4 +55.42 1 +55.43 1 +55.44 1 +55.45 3 +55.47 2 +55.49 2 +55.50 1 +55.56 1 +55.59 1 +55.60 1 +55.61 1 +55.64 1 +55.66 1 +55.68 3 +55.69 1 +55.70 2 +55.71 1 +55.73 1 +55.75 2 +55.76 3 +55.77 1 +55.78 1 +55.80 1 +55.81 2 +55.82 2 +55.83 2 +55.84 1 +55.86 1 +55.87 1 +55.89 2 +55.90 4 +55.92 1 +55.93 1 +55.94 4 +55.96 1 +55.97 2 +56.00 1 +56.01 1 +56.02 2 +56.04 3 +56.05 1 +56.06 2 +56.07 2 +56.08 2 +56.10 1 +56.11 1 +56.12 1 +56.13 1 +56.15 2 +56.16 1 +56.18 1 +56.20 1 +56.21 1 +56.22 1 +56.23 1 +56.27 1 +56.28 1 +56.32 2 +56.33 3 +56.34 1 +56.36 2 +56.37 1 +56.38 1 +56.41 1 +56.43 1 +56.44 1 +56.45 2 +56.48 1 +56.49 1 +56.52 1 +56.53 2 +56.54 1 +56.55 1 +56.56 2 +56.57 1 +56.58 1 +56.59 3 +56.60 2 +56.63 2 +56.64 2 +56.65 4 +56.70 3 +56.72 1 +56.73 2 +56.74 4 +56.79 1 +56.82 2 +56.83 4 +56.84 1 +56.85 2 +56.86 2 +56.90 2 +56.97 1 +56.98 2 +56.99 1 +57.01 1 +57.03 2 +57.07 1 +57.09 2 +57.10 3 +57.11 1 +57.12 2 +57.15 1 +57.17 2 +57.19 1 +57.20 1 +57.21 3 +57.22 2 +57.23 1 +57.25 2 +57.26 1 +57.28 1 +57.29 2 +57.30 1 +57.32 1 +57.33 4 +57.34 2 +57.36 1 +57.37 1 +57.38 2 +57.39 2 +57.40 1 +57.41 1 +57.42 1 +57.46 1 +57.47 1 +57.48 1 +57.49 2 +57.50 1 +57.53 1 +57.55 2 +57.56 1 +57.58 5 +57.59 1 +57.60 1 +57.61 3 +57.63 1 +57.64 1 +57.65 2 +57.66 2 +57.67 3 +57.68 2 +57.69 3 +57.71 2 +57.73 1 +57.74 1 +57.79 2 +57.80 1 +57.82 2 +57.83 1 +57.86 2 +57.87 2 +57.88 2 +57.89 2 +57.90 1 +57.91 2 +57.93 1 +57.94 1 +57.95 1 +57.97 1 +57.99 3 +58.01 1 +58.02 1 +58.03 3 +58.04 1 +58.05 5 +58.06 2 +58.07 1 +58.08 1 +58.10 2 +58.12 1 +58.14 1 +58.15 2 +58.19 1 +58.20 1 +58.21 1 +58.23 3 +58.24 1 +58.25 3 +58.27 2 +58.28 2 +58.29 1 +58.30 1 +58.31 1 +58.34 1 +58.38 1 +58.39 2 +58.40 2 +58.41 3 +58.42 1 +58.44 1 +58.45 1 +58.46 1 +58.47 1 +58.50 1 +58.51 1 +58.52 1 +58.54 3 +58.56 1 +58.58 1 +58.59 1 +58.60 1 +58.62 1 +58.63 1 +58.64 2 +58.65 1 +58.66 1 +58.67 2 +58.68 2 +58.69 1 +58.70 1 +58.71 1 +58.75 2 +58.78 2 +58.79 4 +58.80 4 +58.81 3 +58.82 4 +58.83 3 +58.84 2 +58.85 3 +58.86 2 +58.87 1 +58.88 1 +58.89 1 +58.90 4 +58.92 2 +58.94 1 +58.96 2 +58.97 2 +58.99 2 +59.00 1 +59.01 1 +59.02 2 +59.03 3 +59.06 1 +59.08 1 +59.10 2 +59.12 2 +59.13 2 +59.16 1 +59.17 2 +59.18 1 +59.19 1 +59.21 1 +59.24 1 +59.26 
1 +59.28 2 +59.29 1 +59.30 1 +59.32 2 +59.33 1 +59.35 2 +59.36 1 +59.37 1 +59.38 1 +59.40 2 +59.41 2 +59.42 2 +59.43 2 +59.44 2 +59.46 2 +59.47 2 +59.48 1 +59.49 5 +59.51 2 +59.52 1 +59.53 2 +59.54 1 +59.55 2 +59.56 3 +59.57 1 +59.58 1 +59.59 1 +59.62 1 +59.63 1 +59.64 2 +59.67 1 +59.68 2 +59.69 2 +59.70 1 +59.71 2 +59.72 1 +59.74 1 +59.76 1 +59.77 3 +59.78 1 +59.79 1 +59.82 1 +59.83 1 +59.89 1 +59.90 1 +59.91 1 +59.92 2 +59.93 2 +59.94 1 +59.95 2 +59.96 1 +59.99 1 +6.00 2 +6.02 2 +6.03 1 +6.04 1 +6.06 2 +6.09 1 +6.10 1 +6.11 1 +6.14 1 +6.15 1 +6.16 1 +6.17 1 +6.18 1 +6.19 2 +6.21 2 +6.22 1 +6.24 2 +6.25 1 +6.26 1 +6.29 2 +6.30 1 +6.35 2 +6.37 1 +6.39 2 +6.40 1 +6.41 1 +6.42 2 +6.43 1 +6.47 1 +6.48 1 +6.50 1 +6.54 2 +6.57 1 +6.59 1 +6.60 1 +6.61 4 +6.62 1 +6.63 1 +6.64 3 +6.66 1 +6.71 1 +6.74 1 +6.76 2 +6.78 2 +6.79 2 +6.81 1 +6.82 2 +6.83 2 +6.84 1 +6.85 2 +6.87 1 +6.88 1 +6.89 2 +6.90 1 +6.92 1 +6.93 1 +6.99 1 +60.00 2 +60.01 3 +60.02 1 +60.04 2 +60.05 3 +60.13 1 +60.16 1 +60.17 2 +60.18 3 +60.20 2 +60.22 1 +60.23 1 +60.24 2 +60.25 2 +60.26 1 +60.29 3 +60.30 1 +60.32 1 +60.35 2 +60.36 1 +60.37 2 +60.38 1 +60.39 1 +60.41 1 +60.42 1 +60.45 1 +60.46 3 +60.48 1 +60.51 1 +60.52 1 +60.53 1 +60.55 1 +60.56 2 +60.57 2 +60.58 1 +60.59 1 +60.60 1 +60.62 1 +60.63 1 +60.64 1 +60.66 2 +60.67 2 +60.70 1 +60.75 2 +60.77 1 +60.78 2 +60.80 2 +60.81 1 +60.82 1 +60.83 1 +60.85 3 +60.86 1 +60.87 3 +60.88 2 +60.89 1 +60.90 2 +60.92 1 +60.93 1 +60.94 2 +60.96 1 +60.97 1 +60.98 1 +60.99 1 +61.00 2 +61.02 1 +61.03 1 +61.04 1 +61.05 2 +61.06 1 +61.08 1 +61.11 4 +61.12 2 +61.13 2 +61.14 1 +61.15 2 +61.16 2 +61.18 1 +61.19 1 +61.20 1 +61.22 1 +61.23 1 +61.24 3 +61.26 1 +61.28 2 +61.29 2 +61.30 3 +61.31 2 +61.33 1 +61.34 3 +61.36 3 +61.38 2 +61.39 1 +61.41 2 +61.42 1 +61.43 1 +61.44 2 +61.46 1 +61.47 1 +61.50 4 +61.52 3 +61.53 1 +61.54 1 +61.55 1 +61.56 4 +61.57 2 +61.58 2 +61.59 2 +61.64 1 +61.67 1 +61.68 2 +61.69 1 +61.70 1 +61.77 2 +61.79 1 +61.81 2 +61.82 2 +61.83 1 +61.84 1 +61.85 2 +61.88 2 +61.89 3 +61.90 2 +61.93 1 +61.94 2 +61.96 2 +61.99 1 +62.00 2 +62.01 3 +62.02 1 +62.03 1 +62.04 2 +62.09 2 +62.11 3 +62.12 1 +62.14 1 +62.16 2 +62.17 1 +62.20 2 +62.21 1 +62.25 2 +62.26 2 +62.28 1 +62.31 2 +62.32 2 +62.33 1 +62.34 1 +62.35 4 +62.36 1 +62.37 1 +62.39 2 +62.41 1 +62.42 2 +62.43 2 +62.44 1 +62.45 1 +62.47 1 +62.48 2 +62.49 1 +62.51 2 +62.52 1 +62.53 1 +62.56 1 +62.58 1 +62.59 4 +62.60 1 +62.62 1 +62.65 2 +62.66 1 +62.67 1 +62.68 1 +62.70 3 +62.71 1 +62.72 1 +62.73 2 +62.77 1 +62.79 1 +62.80 2 +62.81 3 +62.82 1 +62.83 1 +62.84 4 +62.86 1 +62.89 3 +62.91 2 +62.92 1 +62.93 3 +62.95 2 +62.96 1 +62.98 1 +62.99 2 +63.00 1 +63.01 2 +63.02 1 +63.03 1 +63.05 2 +63.06 1 +63.07 1 +63.08 2 +63.10 1 +63.11 1 +63.12 1 +63.14 4 +63.15 1 +63.16 1 +63.17 1 +63.18 1 +63.21 2 +63.22 1 +63.23 2 +63.24 3 +63.25 2 +63.26 1 +63.27 2 +63.28 2 +63.29 2 +63.30 3 +63.31 1 +63.33 1 +63.34 1 +63.35 1 +63.36 3 +63.39 2 +63.40 1 +63.41 1 +63.43 1 +63.46 1 +63.47 1 +63.48 1 +63.49 3 +63.50 1 +63.51 2 +63.52 1 +63.54 2 +63.55 3 +63.57 2 +63.58 1 +63.59 1 +63.60 1 +63.61 1 +63.64 1 +63.65 3 +63.66 2 +63.70 1 +63.72 2 +63.73 1 +63.74 2 +63.75 1 +63.77 2 +63.78 3 +63.80 2 +63.81 2 +63.83 1 +63.85 1 +63.87 1 +63.90 2 +63.92 2 +63.93 3 +63.94 3 +63.99 3 +64.00 2 +64.01 2 +64.04 1 +64.05 2 +64.06 2 +64.09 2 +64.12 2 +64.14 1 +64.15 2 +64.16 1 +64.17 1 +64.18 2 +64.19 2 +64.20 2 +64.21 2 +64.22 3 +64.23 3 +64.24 2 +64.28 1 +64.29 2 +64.30 1 +64.32 1 +64.33 2 +64.34 1 +64.35 2 +64.36 2 +64.38 2 +64.39 2 +64.40 4 +64.41 2 +64.42 2 +64.43 1 +64.44 2 
+64.45 1 +64.46 2 +64.49 2 +64.52 2 +64.55 1 +64.60 1 +64.61 1 +64.62 1 +64.63 1 +64.65 3 +64.68 1 +64.69 1 +64.70 2 +64.71 3 +64.75 3 +64.76 1 +64.77 3 +64.79 1 +64.80 1 +64.81 2 +64.82 1 +64.84 1 +64.88 3 +64.89 1 +64.90 2 +64.91 3 +64.94 2 +64.95 3 +64.98 3 +64.99 2 +65.01 1 +65.02 1 +65.03 1 +65.05 1 +65.06 1 +65.07 1 +65.08 2 +65.13 2 +65.16 1 +65.17 2 +65.18 3 +65.20 1 +65.21 2 +65.22 1 +65.23 1 +65.24 2 +65.25 1 +65.26 1 +65.27 1 +65.28 2 +65.30 1 +65.31 1 +65.32 1 +65.33 2 +65.34 1 +65.35 1 +65.37 5 +65.40 1 +65.42 1 +65.43 1 +65.45 2 +65.46 2 +65.49 2 +65.50 3 +65.53 1 +65.57 1 +65.58 2 +65.61 1 +65.62 1 +65.66 2 +65.69 1 +65.72 1 +65.74 1 +65.75 1 +65.77 3 +65.78 2 +65.79 2 +65.81 2 +65.82 3 +65.85 5 +65.86 1 +65.87 2 +65.88 1 +65.89 1 +65.90 1 +65.91 1 +65.92 1 +65.93 1 +65.98 1 +65.99 1 +66.01 1 +66.02 1 +66.07 2 +66.08 1 +66.09 1 +66.10 1 +66.11 2 +66.13 1 +66.15 2 +66.16 2 +66.19 1 +66.22 1 +66.23 3 +66.24 2 +66.25 1 +66.27 2 +66.30 1 +66.32 2 +66.33 1 +66.34 3 +66.36 3 +66.37 1 +66.39 1 +66.41 1 +66.43 2 +66.45 1 +66.46 1 +66.47 1 +66.49 1 +66.52 1 +66.53 3 +66.54 1 +66.55 1 +66.56 1 +66.57 1 +66.58 2 +66.59 1 +66.60 1 +66.61 1 +66.62 2 +66.64 1 +66.67 1 +66.68 2 +66.71 1 +66.74 2 +66.76 2 +66.78 1 +66.79 1 +66.80 2 +66.81 1 +66.83 1 +66.84 1 +66.85 2 +66.88 1 +66.89 3 +66.91 3 +66.93 2 +66.94 1 +66.95 1 +66.98 2 +66.99 1 +67.00 1 +67.02 2 +67.03 1 +67.04 3 +67.05 1 +67.07 1 +67.08 1 +67.09 1 +67.10 1 +67.14 1 +67.16 1 +67.17 1 +67.18 3 +67.20 1 +67.22 2 +67.23 2 +67.24 1 +67.29 2 +67.30 1 +67.34 2 +67.37 1 +67.40 1 +67.41 1 +67.42 1 +67.46 1 +67.50 3 +67.52 1 +67.54 4 +67.55 1 +67.56 1 +67.59 1 +67.60 1 +67.65 1 +67.66 1 +67.69 1 +67.70 1 +67.71 1 +67.73 1 +67.74 2 +67.78 1 +67.80 2 +67.81 2 +67.82 2 +67.83 2 +67.85 2 +67.87 4 +67.89 1 +67.90 2 +67.91 3 +67.93 2 +67.94 1 +67.99 1 +68.00 3 +68.01 3 +68.02 3 +68.04 2 +68.05 1 +68.06 1 +68.07 1 +68.08 1 +68.09 7 +68.10 2 +68.12 1 +68.14 1 +68.15 2 +68.16 1 +68.17 1 +68.18 1 +68.21 3 +68.24 1 +68.25 3 +68.26 2 +68.28 2 +68.30 3 +68.32 1 +68.33 1 +68.35 2 +68.36 1 +68.39 1 +68.40 3 +68.42 1 +68.43 1 +68.45 2 +68.46 1 +68.48 2 +68.49 2 +68.50 1 +68.54 2 +68.57 1 +68.60 1 +68.64 1 +68.67 1 +68.68 1 +68.72 1 +68.74 1 +68.76 2 +68.79 1 +68.80 4 +68.81 2 +68.82 1 +68.83 1 +68.84 2 +68.85 1 +68.87 1 +68.88 1 +68.89 2 +68.90 2 +68.91 1 +68.92 1 +68.94 1 +68.95 2 +68.97 2 +68.98 1 +68.99 1 +69.00 1 +69.01 2 +69.02 2 +69.03 1 +69.05 1 +69.06 3 +69.11 1 +69.13 2 +69.14 3 +69.16 3 +69.17 1 +69.22 1 +69.24 1 +69.26 3 +69.27 2 +69.28 2 +69.29 3 +69.30 2 +69.32 1 +69.33 6 +69.34 1 +69.36 2 +69.37 1 +69.38 1 +69.41 2 +69.42 3 +69.43 1 +69.44 1 +69.45 2 +69.46 2 +69.47 4 +69.48 2 +69.49 1 +69.52 3 +69.53 1 +69.54 1 +69.55 2 +69.58 3 +69.60 2 +69.62 1 +69.64 1 +69.67 1 +69.68 1 +69.72 2 +69.75 2 +69.76 4 +69.78 1 +69.79 2 +69.80 2 +69.81 1 +69.82 1 +69.84 1 +69.85 4 +69.86 1 +69.87 1 +69.88 1 +69.89 1 +69.90 2 +69.91 1 +69.92 2 +69.93 1 +69.94 1 +69.96 1 +69.97 1 +69.98 3 +7.00 1 +7.02 1 +7.03 3 +7.04 2 +7.06 2 +7.08 2 +7.09 1 +7.10 2 +7.11 1 +7.12 1 +7.13 1 +7.14 2 +7.16 1 +7.17 1 +7.19 2 +7.20 1 +7.21 1 +7.22 1 +7.23 1 +7.24 1 +7.26 1 +7.27 2 +7.28 2 +7.30 1 +7.31 1 +7.33 1 +7.34 2 +7.36 4 +7.37 3 +7.38 2 +7.39 3 +7.42 1 +7.45 1 +7.46 2 +7.49 3 +7.50 1 +7.51 1 +7.52 2 +7.54 1 +7.57 2 +7.59 4 +7.61 2 +7.62 1 +7.66 1 +7.67 4 +7.71 1 +7.72 1 +7.73 1 +7.74 1 +7.76 1 +7.80 2 +7.81 1 +7.82 2 +7.83 2 +7.84 1 +7.87 1 +7.88 2 +7.89 2 +7.90 2 +7.92 3 +7.93 1 +7.94 1 +7.95 2 +7.97 3 +7.98 1 +7.99 2 +70.00 1 +70.01 3 +70.02 3 +70.06 2 +70.08 2 +70.10 2 
+70.12 1 +70.13 1 +70.14 2 +70.15 1 +70.16 1 +70.25 1 +70.27 1 +70.29 1 +70.31 1 +70.32 2 +70.36 1 +70.38 1 +70.41 1 +70.43 1 +70.45 2 +70.48 1 +70.49 1 +70.50 1 +70.51 2 +70.52 3 +70.53 1 +70.54 2 +70.57 3 +70.58 2 +70.59 1 +70.60 1 +70.62 1 +70.63 2 +70.64 2 +70.66 1 +70.67 2 +70.70 2 +70.74 1 +70.75 1 +70.76 1 +70.78 3 +70.79 1 +70.80 2 +70.81 3 +70.82 1 +70.84 1 +70.87 1 +70.88 1 +70.89 2 +70.90 1 +70.91 2 +70.92 2 +70.93 1 +70.96 1 +71.00 1 +71.01 1 +71.02 1 +71.05 1 +71.07 3 +71.08 1 +71.09 1 +71.12 1 +71.14 1 +71.15 3 +71.16 1 +71.17 1 +71.20 1 +71.21 1 +71.22 1 +71.24 1 +71.25 1 +71.27 1 +71.28 1 +71.29 1 +71.30 1 +71.33 1 +71.35 2 +71.37 1 +71.38 1 +71.39 2 +71.42 1 +71.43 1 +71.45 1 +71.48 1 +71.49 1 +71.51 2 +71.53 1 +71.54 1 +71.55 1 +71.56 1 +71.57 3 +71.59 2 +71.61 1 +71.62 2 +71.65 1 +71.66 5 +71.67 1 +71.72 2 +71.73 1 +71.75 1 +71.76 1 +71.77 1 +71.78 1 +71.79 2 +71.80 2 +71.82 2 +71.83 3 +71.84 2 +71.86 1 +71.87 3 +71.88 1 +71.89 1 +71.90 3 +71.91 1 +71.93 1 +71.94 3 +71.95 1 +71.97 1 +71.99 2 +72.00 2 +72.03 1 +72.04 4 +72.07 1 +72.09 1 +72.10 1 +72.11 1 +72.13 2 +72.14 1 +72.15 1 +72.16 3 +72.17 2 +72.19 1 +72.22 3 +72.23 1 +72.24 1 +72.30 2 +72.31 2 +72.32 1 +72.33 3 +72.36 1 +72.37 2 +72.39 3 +72.41 1 +72.43 1 +72.44 2 +72.45 1 +72.46 2 +72.47 3 +72.48 1 +72.50 2 +72.53 1 +72.54 4 +72.55 3 +72.56 1 +72.58 2 +72.59 2 +72.60 1 +72.62 2 +72.63 1 +72.64 1 +72.65 2 +72.67 2 +72.69 1 +72.70 2 +72.73 1 +72.75 1 +72.76 1 +72.79 1 +72.80 1 +72.82 2 +72.83 2 +72.84 1 +72.86 1 +72.87 2 +72.89 1 +72.90 3 +72.91 1 +72.92 1 +72.95 2 +72.96 1 +72.97 1 +72.99 3 +73.01 1 +73.02 1 +73.03 1 +73.06 2 +73.07 3 +73.08 3 +73.09 2 +73.12 1 +73.13 2 +73.16 2 +73.20 2 +73.21 1 +73.23 1 +73.24 1 +73.27 1 +73.28 1 +73.32 1 +73.35 2 +73.37 1 +73.38 1 +73.39 1 +73.41 2 +73.42 1 +73.45 2 +73.46 3 +73.47 2 +73.48 4 +73.51 2 +73.52 1 +73.54 2 +73.55 1 +73.56 1 +73.58 1 +73.59 1 +73.61 1 +73.64 3 +73.65 1 +73.66 1 +73.67 1 +73.68 1 +73.69 1 +73.70 2 +73.72 1 +73.73 1 +73.75 3 +73.76 1 +73.77 1 +73.79 2 +73.80 3 +73.84 1 +73.85 1 +73.86 1 +73.88 1 +73.89 2 +73.90 1 +73.91 1 +73.93 2 +73.94 1 +73.95 1 +73.97 1 +73.98 2 +73.99 1 +74.00 1 +74.01 1 +74.02 1 +74.06 1 +74.07 1 +74.08 2 +74.09 2 +74.10 1 +74.11 1 +74.12 1 +74.13 3 +74.14 2 +74.16 1 +74.17 2 +74.22 1 +74.23 2 +74.24 1 +74.25 3 +74.28 1 +74.29 2 +74.31 2 +74.32 2 +74.33 1 +74.36 2 +74.38 2 +74.39 1 +74.41 1 +74.42 1 +74.44 2 +74.46 1 +74.47 1 +74.48 3 +74.49 1 +74.50 1 +74.51 1 +74.52 2 +74.53 1 +74.55 1 +74.57 1 +74.58 1 +74.59 2 +74.60 4 +74.61 1 +74.63 1 +74.64 2 +74.67 1 +74.68 1 +74.69 1 +74.72 1 +74.73 4 +74.75 2 +74.76 1 +74.77 1 +74.79 1 +74.80 1 +74.81 1 +74.83 1 +74.84 1 +74.85 1 +74.89 1 +74.91 1 +74.92 1 +74.93 1 +74.95 2 +74.97 1 +74.98 1 +75.00 1 +75.02 2 +75.03 1 +75.04 4 +75.06 3 +75.07 1 +75.09 2 +75.10 3 +75.11 3 +75.14 3 +75.15 3 +75.18 2 +75.20 1 +75.21 1 +75.22 1 +75.23 1 +75.25 1 +75.26 1 +75.27 1 +75.29 1 +75.30 1 +75.32 1 +75.33 2 +75.36 1 +75.39 3 +75.40 2 +75.42 1 +75.43 1 +75.44 1 +75.45 2 +75.46 1 +75.48 2 +75.50 1 +75.51 1 +75.52 1 +75.53 2 +75.55 1 +75.56 1 +75.57 2 +75.58 1 +75.59 1 +75.61 1 +75.62 2 +75.64 2 +75.65 1 +75.66 2 +75.68 1 +75.69 1 +75.70 2 +75.71 2 +75.73 2 +75.74 1 +75.75 1 +75.76 2 +75.77 2 +75.78 2 +75.79 1 +75.80 3 +75.81 1 +75.82 1 +75.84 2 +75.85 1 +75.86 1 +75.87 2 +75.88 4 +75.90 1 +75.91 1 +75.92 1 +75.95 2 +75.97 3 +75.99 2 +76.00 2 +76.01 1 +76.02 1 +76.03 1 +76.04 1 +76.06 3 +76.07 1 +76.09 4 +76.11 1 +76.12 1 +76.13 1 +76.14 1 +76.16 2 +76.17 3 +76.18 2 +76.19 3 +76.20 4 +76.21 3 +76.24 1 
+76.25 1 +76.26 1 +76.27 1 +76.28 1 +76.29 2 +76.30 1 +76.31 3 +76.33 1 +76.34 1 +76.35 2 +76.37 1 +76.39 1 +76.40 2 +76.44 1 +76.45 1 +76.46 1 +76.49 1 +76.50 1 +76.51 2 +76.52 1 +76.53 1 +76.54 3 +76.57 2 +76.58 3 +76.60 1 +76.61 1 +76.63 2 +76.64 3 +76.65 3 +76.67 1 +76.70 1 +76.72 2 +76.73 1 +76.75 2 +76.76 1 +76.77 2 +76.79 1 +76.80 3 +76.81 1 +76.83 2 +76.87 2 +76.88 1 +76.89 1 +76.92 1 +76.93 2 +76.95 4 +76.97 1 +76.98 2 +76.99 1 +77.00 1 +77.01 1 +77.02 1 +77.03 1 +77.06 1 +77.07 3 +77.09 1 +77.10 1 +77.12 1 +77.13 2 +77.16 2 +77.17 1 +77.18 3 +77.21 2 +77.22 1 +77.23 1 +77.24 1 +77.25 1 +77.26 1 +77.27 1 +77.29 2 +77.30 1 +77.31 1 +77.32 1 +77.33 1 +77.34 1 +77.35 1 +77.38 1 +77.39 2 +77.40 3 +77.41 1 +77.43 1 +77.44 3 +77.45 1 +77.48 1 +77.51 1 +77.52 2 +77.53 1 +77.55 2 +77.56 2 +77.57 3 +77.59 1 +77.60 1 +77.61 2 +77.62 2 +77.63 2 +77.64 2 +77.66 3 +77.72 1 +77.75 5 +77.76 1 +77.77 2 +77.78 2 +77.79 1 +77.80 1 +77.83 1 +77.84 1 +77.85 1 +77.86 1 +77.87 2 +77.88 2 +77.92 2 +77.93 2 +77.95 2 +77.96 1 +77.97 1 +77.99 1 +78.01 1 +78.05 3 +78.07 2 +78.11 1 +78.13 1 +78.14 1 +78.15 1 +78.16 1 +78.17 1 +78.18 1 +78.22 2 +78.24 2 +78.25 1 +78.28 1 +78.30 1 +78.32 1 +78.33 1 +78.35 2 +78.36 3 +78.39 1 +78.40 1 +78.41 2 +78.43 1 +78.45 1 +78.46 1 +78.48 2 +78.52 1 +78.53 1 +78.55 3 +78.56 3 +78.57 1 +78.59 1 +78.60 1 +78.62 2 +78.65 1 +78.66 1 +78.69 2 +78.73 2 +78.75 3 +78.76 1 +78.78 1 +78.80 1 +78.83 2 +78.84 2 +78.85 3 +78.86 2 +78.90 1 +78.91 1 +78.92 3 +78.93 2 +78.95 1 +78.97 1 +78.98 1 +79.00 1 +79.02 4 +79.03 2 +79.04 3 +79.06 1 +79.07 2 +79.08 4 +79.09 3 +79.10 2 +79.11 1 +79.13 2 +79.14 1 +79.15 2 +79.16 2 +79.17 1 +79.20 2 +79.21 1 +79.22 1 +79.25 1 +79.26 1 +79.28 2 +79.29 1 +79.30 1 +79.32 1 +79.33 2 +79.34 1 +79.37 1 +79.38 1 +79.39 1 +79.40 2 +79.41 2 +79.42 1 +79.44 1 +79.45 2 +79.46 2 +79.48 1 +79.49 1 +79.50 1 +79.51 2 +79.53 1 +79.55 1 +79.56 1 +79.57 2 +79.58 1 +79.59 1 +79.60 3 +79.63 2 +79.64 1 +79.65 3 +79.66 1 +79.67 1 +79.69 1 +79.74 1 +79.75 1 +79.77 2 +79.80 2 +79.81 1 +79.83 1 +79.84 1 +79.85 3 +79.88 1 +79.89 1 +79.90 1 +79.91 1 +79.92 2 +79.93 2 +79.94 1 +79.95 1 +79.96 2 +79.99 1 +8.00 1 +8.01 1 +8.02 1 +8.03 1 +8.04 1 +8.06 3 +8.07 1 +8.08 1 +8.09 1 +8.12 2 +8.13 1 +8.16 1 +8.17 1 +8.22 1 +8.25 1 +8.26 1 +8.27 1 +8.28 2 +8.30 2 +8.31 1 +8.33 1 +8.34 1 +8.35 2 +8.38 2 +8.39 1 +8.40 1 +8.43 1 +8.44 1 +8.46 1 +8.48 4 +8.49 1 +8.52 1 +8.53 1 +8.54 1 +8.55 2 +8.57 3 +8.58 1 +8.59 1 +8.60 1 +8.62 1 +8.63 2 +8.64 3 +8.65 4 +8.66 1 +8.67 1 +8.68 1 +8.69 3 +8.71 1 +8.72 1 +8.73 2 +8.74 1 +8.76 3 +8.77 2 +8.78 2 +8.80 1 +8.84 2 +8.85 2 +8.86 3 +8.88 1 +8.89 2 +8.90 3 +8.91 3 +8.93 1 +8.94 1 +8.95 2 +8.96 1 +8.97 1 +8.98 2 +8.99 1 +80.00 1 +80.01 1 +80.02 1 +80.05 2 +80.08 2 +80.09 5 +80.10 1 +80.11 3 +80.12 1 +80.13 2 +80.14 1 +80.15 1 +80.21 1 +80.24 2 +80.26 1 +80.27 1 +80.30 1 +80.31 2 +80.36 1 +80.38 1 +80.40 2 +80.42 1 +80.43 1 +80.44 1 +80.45 2 +80.47 1 +80.48 2 +80.50 1 +80.51 2 +80.52 1 +80.54 1 +80.55 2 +80.56 1 +80.59 1 +80.61 2 +80.62 1 +80.63 1 +80.64 1 +80.68 1 +80.69 3 +80.71 1 +80.72 2 +80.73 2 +80.76 2 +80.77 2 +80.80 1 +80.81 4 +80.82 3 +80.83 3 +80.84 1 +80.85 1 +80.88 1 +80.90 1 +80.93 1 +80.94 1 +80.95 1 +80.96 3 +80.97 2 +80.98 1 +80.99 2 +81.02 2 +81.03 3 +81.04 3 +81.06 1 +81.10 1 +81.11 1 +81.13 2 +81.14 1 +81.15 1 +81.16 2 +81.18 1 +81.19 1 +81.20 1 +81.21 2 +81.22 1 +81.23 3 +81.24 1 +81.26 1 +81.27 2 +81.28 3 +81.29 1 +81.30 1 +81.35 3 +81.36 1 +81.37 3 +81.38 1 +81.39 1 +81.40 1 +81.41 1 +81.42 2 +81.43 1 +81.44 3 +81.49 1 +81.50 1 
+81.51 3 +81.53 1 +81.54 2 +81.57 1 +81.58 2 +81.59 1 +81.60 1 +81.61 1 +81.66 1 +81.67 1 +81.68 1 +81.69 1 +81.70 2 +81.71 1 +81.72 1 +81.74 2 +81.76 1 +81.78 1 +81.80 1 +81.81 1 +81.85 2 +81.87 2 +81.88 1 +81.89 1 +81.90 1 +81.91 1 +81.93 2 +81.95 2 +81.96 3 +81.98 2 +81.99 2 +82.00 1 +82.02 2 +82.03 1 +82.04 2 +82.05 1 +82.06 2 +82.07 1 +82.08 2 +82.10 2 +82.12 2 +82.16 1 +82.19 1 +82.20 2 +82.21 2 +82.23 1 +82.24 2 +82.25 2 +82.28 2 +82.29 2 +82.31 1 +82.32 1 +82.33 2 +82.34 1 +82.36 2 +82.37 1 +82.39 4 +82.40 1 +82.41 1 +82.43 1 +82.44 1 +82.46 1 +82.47 1 +82.48 3 +82.49 1 +82.50 1 +82.52 1 +82.53 4 +82.54 1 +82.56 1 +82.57 1 +82.59 1 +82.60 1 +82.61 2 +82.62 1 +82.64 2 +82.67 2 +82.68 1 +82.69 1 +82.70 3 +82.71 2 +82.72 1 +82.73 1 +82.74 2 +82.75 1 +82.78 2 +82.79 3 +82.83 3 +82.85 1 +82.87 1 +82.89 1 +82.90 2 +82.91 1 +82.92 1 +82.93 1 +82.94 3 +82.95 1 +82.96 2 +82.98 2 +82.99 1 +83.00 2 +83.01 2 +83.03 3 +83.04 1 +83.05 1 +83.06 2 +83.08 1 +83.09 1 +83.10 3 +83.12 1 +83.13 2 +83.15 2 +83.16 1 +83.18 1 +83.19 1 +83.20 1 +83.21 5 +83.22 1 +83.23 1 +83.24 1 +83.26 3 +83.27 2 +83.29 2 +83.31 1 +83.33 2 +83.36 1 +83.37 2 +83.38 1 +83.39 1 +83.40 1 +83.42 2 +83.43 1 +83.44 2 +83.46 2 +83.47 1 +83.48 3 +83.49 1 +83.50 2 +83.51 1 +83.52 2 +83.53 1 +83.54 1 +83.60 1 +83.61 1 +83.62 1 +83.63 2 +83.64 3 +83.65 1 +83.66 2 +83.67 1 +83.69 1 +83.71 1 +83.73 1 +83.75 1 +83.76 1 +83.78 1 +83.79 2 +83.80 2 +83.81 1 +83.82 3 +83.83 1 +83.85 1 +83.86 2 +83.89 2 +83.92 2 +83.93 1 +83.94 2 +83.95 4 +83.96 1 +84.01 2 +84.02 2 +84.03 2 +84.05 1 +84.07 2 +84.08 2 +84.10 2 +84.12 2 +84.13 1 +84.17 3 +84.18 1 +84.20 2 +84.23 2 +84.25 1 +84.26 2 +84.27 1 +84.28 1 +84.29 2 +84.32 1 +84.35 3 +84.37 3 +84.38 1 +84.40 3 +84.41 2 +84.42 3 +84.43 1 +84.44 2 +84.45 1 +84.46 1 +84.47 1 +84.50 1 +84.55 1 +84.56 1 +84.57 1 +84.58 1 +84.60 3 +84.61 2 +84.62 1 +84.63 2 +84.64 1 +84.65 1 +84.69 2 +84.70 1 +84.71 1 +84.72 1 +84.73 1 +84.75 1 +84.76 1 +84.78 1 +84.79 1 +84.80 2 +84.81 1 +84.83 2 +84.86 1 +84.87 1 +84.88 2 +84.90 3 +84.91 1 +84.92 1 +84.93 2 +84.95 2 +84.96 2 +84.97 3 +84.99 1 +85.00 1 +85.01 3 +85.02 1 +85.08 1 +85.09 2 +85.13 1 +85.16 1 +85.17 1 +85.19 1 +85.21 1 +85.22 2 +85.25 1 +85.26 5 +85.27 1 +85.28 1 +85.29 1 +85.31 1 +85.34 2 +85.36 1 +85.37 1 +85.38 2 +85.39 2 +85.40 3 +85.41 1 +85.43 3 +85.44 2 +85.48 1 +85.49 1 +85.50 2 +85.52 3 +85.54 1 +85.55 3 +85.58 1 +85.59 1 +85.60 1 +85.61 1 +85.62 1 +85.63 1 +85.64 2 +85.65 4 +85.67 1 +85.68 1 +85.70 2 +85.73 1 +85.74 1 +85.75 1 +85.76 1 +85.79 1 +85.80 1 +85.82 2 +85.83 2 +85.85 1 +85.86 1 +85.87 1 +85.88 2 +85.89 1 +85.90 1 +85.91 1 +85.92 1 +85.93 1 +85.94 2 +85.97 1 +85.98 1 +85.99 1 +86.02 2 +86.04 1 +86.10 1 +86.11 1 +86.12 2 +86.13 1 +86.14 1 +86.15 2 +86.18 1 +86.19 3 +86.20 1 +86.21 1 +86.23 2 +86.24 1 +86.25 1 +86.26 2 +86.27 2 +86.28 1 +86.29 3 +86.33 1 +86.34 2 +86.38 2 +86.39 1 +86.40 1 +86.41 2 +86.43 2 +86.44 1 +86.49 2 +86.50 1 +86.51 3 +86.52 2 +86.53 1 +86.54 1 +86.58 1 +86.59 1 +86.60 3 +86.61 2 +86.64 1 +86.65 2 +86.66 2 +86.69 1 +86.70 3 +86.71 2 +86.72 1 +86.73 1 +86.75 3 +86.76 1 +86.78 5 +86.79 2 +86.82 1 +86.84 2 +86.85 6 +86.86 5 +86.87 1 +86.90 1 +86.92 1 +86.93 1 +86.94 1 +86.95 1 +86.97 1 +87.00 1 +87.01 1 +87.02 1 +87.04 1 +87.05 2 +87.06 1 +87.07 1 +87.10 1 +87.11 1 +87.12 1 +87.16 1 +87.17 2 +87.18 1 +87.19 2 +87.21 2 +87.24 1 +87.28 1 +87.29 3 +87.30 1 +87.31 1 +87.32 2 +87.34 1 +87.36 1 +87.37 1 +87.38 2 +87.41 3 +87.42 1 +87.43 2 +87.44 1 +87.47 2 +87.48 1 +87.49 1 +87.50 2 +87.52 5 +87.56 1 +87.57 2 +87.58 2 +87.59 1 
+87.60 2 +87.61 1 +87.62 1 +87.63 1 +87.65 2 +87.66 1 +87.67 3 +87.69 1 +87.70 1 +87.71 2 +87.72 1 +87.73 1 +87.75 1 +87.76 2 +87.78 2 +87.79 1 +87.80 1 +87.83 1 +87.84 2 +87.86 1 +87.89 2 +87.90 1 +87.91 1 +87.92 1 +87.93 1 +87.95 1 +87.96 2 +87.97 3 +87.99 1 +88.01 1 +88.03 1 +88.04 1 +88.05 2 +88.06 2 +88.08 2 +88.09 2 +88.10 3 +88.11 4 +88.12 1 +88.13 1 +88.14 1 +88.16 1 +88.18 1 +88.19 2 +88.20 1 +88.23 3 +88.24 1 +88.25 1 +88.29 2 +88.30 1 +88.31 2 +88.34 1 +88.35 1 +88.38 3 +88.39 2 +88.42 1 +88.45 2 +88.46 3 +88.50 2 +88.51 2 +88.52 1 +88.55 2 +88.56 1 +88.58 1 +88.60 3 +88.61 1 +88.64 1 +88.65 2 +88.67 1 +88.68 1 +88.69 3 +88.73 1 +88.74 2 +88.75 3 +88.78 2 +88.79 2 +88.80 3 +88.81 1 +88.83 3 +88.87 1 +88.88 1 +88.90 1 +88.92 1 +88.93 1 +88.94 2 +88.95 1 +88.96 1 +88.99 2 +89.00 3 +89.02 1 +89.05 1 +89.06 1 +89.08 1 +89.09 1 +89.11 1 +89.12 1 +89.14 5 +89.16 2 +89.18 2 +89.20 1 +89.23 3 +89.24 1 +89.25 2 +89.26 1 +89.28 1 +89.29 1 +89.31 2 +89.32 3 +89.34 1 +89.35 3 +89.38 1 +89.39 1 +89.40 4 +89.41 1 +89.46 1 +89.49 1 +89.50 2 +89.51 1 +89.52 3 +89.53 2 +89.54 3 +89.55 1 +89.58 2 +89.60 4 +89.61 1 +89.62 3 +89.64 1 +89.66 2 +89.67 3 +89.71 1 +89.73 1 +89.75 1 +89.77 3 +89.79 2 +89.80 1 +89.81 1 +89.84 2 +89.86 1 +89.87 3 +89.89 1 +89.92 1 +89.93 1 +89.94 1 +89.95 2 +89.97 2 +89.98 1 +89.99 1 +9.00 1 +9.01 2 +9.02 5 +9.04 1 +9.05 2 +9.06 1 +9.07 2 +9.08 1 +9.09 1 +9.10 2 +9.11 1 +9.12 1 +9.13 1 +9.14 1 +9.15 1 +9.17 1 +9.18 2 +9.20 1 +9.24 2 +9.25 1 +9.29 2 +9.33 1 +9.35 2 +9.37 2 +9.39 1 +9.42 1 +9.43 1 +9.44 1 +9.45 1 +9.46 1 +9.47 3 +9.49 2 +9.50 1 +9.52 1 +9.53 1 +9.55 1 +9.56 1 +9.58 2 +9.59 2 +9.60 1 +9.63 1 +9.64 1 +9.67 1 +9.71 1 +9.73 1 +9.74 1 +9.76 1 +9.77 1 +9.78 1 +9.82 2 +9.84 2 +9.85 1 +9.86 1 +9.87 1 +9.88 2 +9.89 2 +9.91 1 +9.92 3 +9.93 1 +9.94 1 +9.95 1 +9.96 1 +9.97 1 +9.98 2 +9.99 2 +90.01 3 +90.02 3 +90.05 1 +90.08 2 +90.09 2 +90.13 1 +90.15 2 +90.16 2 +90.17 1 +90.19 2 +90.21 1 +90.22 1 +90.24 1 +90.26 2 +90.27 1 +90.28 1 +90.29 1 +90.30 2 +90.31 2 +90.33 1 +90.34 2 +90.36 3 +90.37 2 +90.38 5 +90.39 2 +90.40 2 +90.42 1 +90.43 1 +90.45 2 +90.46 1 +90.47 1 +90.50 1 +90.51 2 +90.53 1 +90.55 2 +90.56 3 +90.58 1 +90.59 1 +90.62 1 +90.64 1 +90.65 1 +90.66 2 +90.68 1 +90.69 2 +90.70 2 +90.71 1 +90.72 1 +90.73 1 +90.74 2 +90.75 1 +90.77 1 +90.78 1 +90.79 1 +90.82 1 +90.83 2 +90.84 1 +90.85 1 +90.87 1 +90.88 2 +90.89 2 +90.90 2 +90.91 2 +90.93 1 +90.95 2 +90.98 1 +91.00 2 +91.03 3 +91.05 1 +91.07 1 +91.08 1 +91.09 3 +91.10 1 +91.11 2 +91.12 2 +91.13 1 +91.15 1 +91.16 1 +91.19 2 +91.22 1 +91.28 1 +91.30 1 +91.31 1 +91.33 2 +91.34 1 +91.35 2 +91.37 3 +91.38 2 +91.40 1 +91.41 1 +91.42 1 +91.43 2 +91.44 1 +91.45 1 +91.46 2 +91.47 1 +91.49 1 +91.51 4 +91.52 1 +91.54 1 +91.56 3 +91.57 1 +91.58 1 +91.59 1 +91.60 1 +91.66 1 +91.67 1 +91.68 1 +91.69 1 +91.70 1 +91.71 1 +91.72 1 +91.75 1 +91.76 1 +91.77 1 +91.80 1 +91.81 1 +91.85 2 +91.87 1 +91.89 1 +91.90 2 +91.93 1 +91.94 1 +91.96 1 +91.98 1 +92.01 2 +92.03 1 +92.05 1 +92.06 1 +92.08 1 +92.13 1 +92.14 1 +92.16 1 +92.17 1 +92.18 1 +92.21 1 +92.22 3 +92.23 1 +92.24 2 +92.27 1 +92.28 1 +92.29 1 +92.30 1 +92.31 1 +92.32 1 +92.34 1 +92.35 1 +92.36 1 +92.38 1 +92.39 1 +92.40 1 +92.41 1 +92.42 1 +92.44 1 +92.47 2 +92.48 1 +92.50 2 +92.52 1 +92.53 1 +92.56 2 +92.57 1 +92.59 2 +92.60 1 +92.61 2 +92.63 2 +92.64 1 +92.67 1 +92.71 2 +92.72 2 +92.73 1 +92.74 3 +92.75 1 +92.78 1 +92.79 2 +92.81 2 +92.82 2 +92.86 1 +92.88 1 +92.89 1 +92.90 1 +92.91 1 +92.92 2 +92.93 2 +92.94 2 +92.95 3 +92.97 3 +92.99 1 +93.00 1 +93.01 3 +93.03 3 +93.05 3 
+93.08 1 +93.09 2 +93.10 1 +93.11 2 +93.13 1 +93.16 2 +93.17 2 +93.18 1 +93.19 1 +93.21 2 +93.22 1 +93.23 1 +93.24 3 +93.26 1 +93.27 1 +93.28 1 +93.29 2 +93.30 2 +93.31 1 +93.32 1 +93.35 1 +93.36 1 +93.38 1 +93.39 1 +93.40 1 +93.41 3 +93.43 2 +93.45 1 +93.46 2 +93.48 1 +93.49 3 +93.50 1 +93.52 2 +93.54 1 +93.55 1 +93.57 2 +93.58 1 +93.60 1 +93.62 2 +93.63 1 +93.64 1 +93.65 2 +93.66 2 +93.68 1 +93.69 1 +93.70 2 +93.71 1 +93.73 2 +93.74 1 +93.77 2 +93.78 1 +93.79 3 +93.81 1 +93.83 2 +93.84 2 +93.85 2 +93.86 1 +93.87 1 +93.89 1 +93.90 1 +93.93 2 +93.95 3 +93.96 1 +93.97 3 +93.98 2 +93.99 2 +94.01 1 +94.02 1 +94.03 4 +94.04 1 +94.05 1 +94.06 1 +94.09 2 +94.11 1 +94.12 1 +94.13 3 +94.14 1 +94.16 2 +94.17 1 +94.23 2 +94.25 1 +94.26 1 +94.30 4 +94.32 1 +94.33 1 +94.34 2 +94.35 2 +94.39 2 +94.42 1 +94.43 1 +94.44 3 +94.45 1 +94.46 2 +94.47 6 +94.48 1 +94.49 1 +94.50 1 +94.51 1 +94.52 1 +94.53 1 +94.56 1 +94.58 2 +94.59 2 +94.60 3 +94.61 2 +94.62 1 +94.63 4 +94.68 1 +94.69 2 +94.71 1 +94.73 1 +94.74 1 +94.75 1 +94.77 2 +94.78 1 +94.80 1 +94.82 2 +94.83 2 +94.85 1 +94.86 2 +94.87 1 +94.88 2 +94.89 1 +94.90 2 +94.91 3 +94.92 1 +94.95 1 +94.97 1 +94.99 1 +95.02 1 +95.05 1 +95.06 2 +95.07 3 +95.08 1 +95.09 1 +95.10 1 +95.11 2 +95.12 4 +95.13 2 +95.14 2 +95.15 1 +95.16 1 +95.20 2 +95.21 1 +95.23 1 +95.24 1 +95.26 1 +95.28 1 +95.32 1 +95.33 1 +95.34 2 +95.37 1 +95.38 1 +95.39 1 +95.40 2 +95.41 1 +95.43 1 +95.45 3 +95.46 4 +95.48 2 +95.51 1 +95.52 1 +95.54 2 +95.56 2 +95.57 1 +95.58 2 +95.59 1 +95.61 3 +95.62 1 +95.63 2 +95.64 1 +95.65 1 +95.67 3 +95.68 1 +95.69 4 +95.70 1 +95.71 2 +95.72 1 +95.74 1 +95.79 1 +95.80 2 +95.81 3 +95.83 2 +95.84 1 +95.87 3 +95.88 1 +95.89 3 +95.90 2 +95.93 1 +95.94 3 +95.99 1 +96.01 1 +96.02 1 +96.04 1 +96.05 2 +96.06 2 +96.07 2 +96.08 1 +96.09 1 +96.10 2 +96.13 2 +96.20 2 +96.21 1 +96.22 1 +96.23 1 +96.24 1 +96.25 1 +96.26 1 +96.28 2 +96.30 2 +96.31 1 +96.32 1 +96.34 2 +96.35 2 +96.36 1 +96.38 1 +96.39 2 +96.42 1 +96.43 1 +96.44 1 +96.45 5 +96.46 1 +96.47 1 +96.48 2 +96.50 1 +96.51 1 +96.52 1 +96.53 2 +96.54 1 +96.55 1 +96.57 1 +96.58 1 +96.59 1 +96.60 4 +96.61 1 +96.62 2 +96.63 1 +96.64 2 +96.66 2 +96.68 2 +96.69 2 +96.71 2 +96.72 1 +96.74 2 +96.76 4 +96.77 1 +96.79 1 +96.80 2 +96.81 2 +96.82 1 +96.83 1 +96.85 2 +96.87 1 +96.89 3 +96.90 2 +96.92 1 +96.95 2 +96.96 1 +96.97 1 +96.98 2 +96.99 1 +97.01 1 +97.03 2 +97.07 1 +97.09 1 +97.10 2 +97.11 3 +97.15 2 +97.16 1 +97.18 2 +97.20 2 +97.21 1 +97.22 2 +97.24 1 +97.25 2 +97.26 1 +97.28 2 +97.29 1 +97.31 1 +97.32 2 +97.34 1 +97.35 1 +97.37 2 +97.38 2 +97.39 1 +97.41 2 +97.42 4 +97.43 1 +97.48 2 +97.49 2 +97.50 2 +97.52 2 +97.53 1 +97.55 1 +97.56 4 +97.57 1 +97.59 1 +97.60 1 +97.62 1 +97.64 1 +97.65 1 +97.66 1 +97.68 1 +97.69 2 +97.70 2 +97.72 1 +97.74 3 +97.75 1 +97.76 1 +97.77 1 +97.78 1 +97.81 2 +97.82 1 +97.84 1 +97.85 1 +97.86 2 +97.87 1 +97.88 1 +97.91 1 +97.94 2 +97.96 1 +97.98 2 +97.99 2 +98.00 1 +98.01 1 +98.03 1 +98.04 1 +98.06 3 +98.07 2 +98.08 1 +98.10 3 +98.11 3 +98.12 1 +98.13 3 +98.15 3 +98.16 1 +98.18 1 +98.20 1 +98.22 1 +98.23 1 +98.24 2 +98.25 3 +98.26 2 +98.28 2 +98.29 2 +98.32 1 +98.33 1 +98.35 2 +98.36 2 +98.38 2 +98.39 1 +98.41 1 +98.42 3 +98.43 1 +98.44 1 +98.46 1 +98.48 2 +98.50 1 +98.51 3 +98.52 1 +98.53 1 +98.54 1 +98.56 1 +98.58 1 +98.60 1 +98.63 1 +98.64 2 +98.67 3 +98.68 2 +98.70 1 +98.72 1 +98.73 1 +98.76 2 +98.77 1 +98.79 1 +98.80 2 +98.82 1 +98.83 1 +98.84 1 +98.86 2 +98.87 2 +98.88 1 +98.89 1 +98.90 5 +98.91 1 +98.93 1 +98.97 1 +98.99 2 +99.00 3 +99.01 1 +99.02 1 +99.03 1 +99.04 1 +99.05 3 +99.06 1 
+99.08 3 +99.10 1 +99.12 1 +99.13 1 +99.14 1 +99.16 1 +99.18 2 +99.19 2 +99.20 1 +99.21 1 +99.22 1 +99.23 1 +99.24 2 +99.27 2 +99.28 3 +99.29 1 +99.31 3 +99.32 1 +99.33 3 +99.34 2 +99.35 2 +99.37 3 +99.38 1 +99.40 1 +99.41 1 +99.42 3 +99.44 1 +99.46 2 +99.48 1 +99.50 4 +99.51 1 +99.52 1 +99.53 2 +99.54 1 +99.55 3 +99.56 2 +99.57 2 +99.58 3 +99.60 1 +99.62 1 +99.63 2 +99.64 2 +99.66 1 +99.67 1 +99.69 1 +99.76 2 +99.77 1 +99.78 1 +99.79 2 +99.80 1 +99.82 2 +99.84 1 +99.85 1 +99.86 3 +99.87 3 +99.88 1 +99.89 1 +99.90 2 +99.92 1 +99.94 2 +99.95 1 +99.96 1 +99.98 1 +99.99 1
+PREHOOK: query: explain vectorization expression
+select `dec`, count(*) from over10k group by `dec`
+PREHOOK: type: QUERY
+POSTHOOK: query: explain vectorization expression
+select `dec`, count(*) from over10k group by `dec`
+POSTHOOK: type: QUERY
+PLAN VECTORIZATION:
+  enabled: true
+  enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: over10k
+            Statistics: Num rows: 1 Data size: 10175440 Basic stats: COMPLETE Column stats: NONE
+            TableScan Vectorization:
+                native: true
+            Select Operator
+              expressions: dec (type: decimal(4,2))
+              outputColumnNames: dec
+              Select Vectorization:
+                  className: VectorSelectOperator
+                  native: true
+                  projectedOutputColumnNums: [9]
+              Statistics: Num rows: 1 Data size: 10175440 Basic stats: COMPLETE Column stats: NONE
+              Group By Operator
+                aggregations: count()
+                Group By Vectorization:
+                    aggregators: VectorUDAFCountStar(*) -> bigint
+                    className: VectorGroupByOperator
+                    groupByMode: HASH
+                    keyExpressions: ConvertDecimal64ToDecimal(col 9:decimal(4,2)/DECIMAL_64) -> 12:decimal(4,2)
+                    native: false
+                    nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+                    nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false
+                    vectorProcessingMode: HASH
+                    projectedOutputColumnNums: [0]
+                keys: dec (type: decimal(4,2))
+                mode: hash
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 1 Data size: 10175440 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  key expressions: _col0 (type: decimal(4,2))
+                  sort order: +
+                  Map-reduce partition columns: _col0 (type: decimal(4,2))
+                  Reduce Sink Vectorization:
+                      className: VectorReduceSinkOperator
+                      native: false
+                      nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
+                      nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false
+                  Statistics: Num rows: 1 Data size: 10175440 Basic stats: COMPLETE Column stats: NONE
+                  value expressions: _col1 (type: bigint)
+      Execution mode: vectorized
+      Map Vectorization:
+          enabled: true
+          enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
+          inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+          allNative: false
+          usesVectorUDFAdaptor: false
+          vectorized: true
+      Reduce Vectorization:
+          enabled: false
+          enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true
+          enableConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false
+      Reduce Operator Tree:
+        Group By Operator
+          aggregations: count(VALUE._col0)
+          keys: KEY._col0 (type: decimal(4,2))
+          mode: mergepartial
+          outputColumnNames: _col0, _col1
+          Statistics: Num rows: 1 Data size: 10175440 Basic stats: COMPLETE Column stats: NONE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 1 Data size: 10175440 Basic stats: COMPLETE Column stats: NONE
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select `dec`, count(*) from over10k group by `dec`
+PREHOOK: type: QUERY
+PREHOOK: Input: default@over10k
+#### A masked pattern was here ####
+POSTHOOK: query: select `dec`, count(*) from over10k group by `dec`
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@over10k
+#### A masked pattern was here ####
+0.01 2 +0.02 1 +0.03 2 +0.04 1 +0.05 1 +0.06 3 +0.07 1 +0.08 3 +0.10 1 +0.11 1 +0.15 1 +0.17 1 +0.21 2 +0.22 1 +0.23 2 +0.24 1 +0.25 2 +0.26 1 +0.27 1 +0.29 1 +0.30 2 +0.31 3 +0.32 1 +0.33 1 +0.34 3 +0.35 2 +0.36 1 +0.37 2 +0.38 3 +0.39 1 +0.40 2 +0.42 1 +0.45 2 +0.46 3 +0.47 1 +0.50 1 +0.51 1 +0.55 1 +0.56 4 +0.57 1 +0.58 1 +0.64 3 +0.66 1 +0.67 1 +0.68 1 +0.70 1 +0.72 1 +0.73 1 +0.75 1 +0.77 1 +0.79 1 +0.80 2 +0.81 1 +0.83 1 +0.84 1 +0.85 1 +0.86 1 +0.87 1 +0.88 1 +0.89 1 +0.90 1 +0.91 2 +0.92 2 +0.93 1 +0.94 1 +0.96 1 +0.97 4 +0.98 2 +0.99 2 +1.00 2 +1.01 2 +1.04 1 +1.05 1 +1.07 2 +1.10 2 +1.11 1 +1.12 1 +1.13 1 +1.15 1 +1.16 1 +1.17 1 +1.19 1 +1.20 2 +1.22 2 +1.23 2 +1.24 2 +1.25 3 +1.27 1 +1.29 3 +1.31 1 +1.32 1 +1.33 1 +1.34 2 +1.35 2 +1.36 2 +1.37 2 +1.38 2 +1.39 1 +1.40 2 +1.42 2 +1.43 3 +1.45 2 +1.46 1 +1.48 1 +1.49 3 +1.50 2 +1.52 1 +1.55 2 +1.56 2 +1.59 1 +1.62 1 +1.65 1 +1.66 1 +1.67 1 +1.68 2 +1.69 1 +1.70 1 +1.72 4 +1.74 1 +1.75 2 +1.76 2 +1.77 1 +1.78 1 +1.79 1 +1.81 2 +1.82 1 +1.85 1 +1.86 1 +1.89 1 +1.94 1 +1.97 3 +1.99 1 +10.00 2 +10.03 2 +10.04 1 +10.06 1 +10.07 1 +10.08 2 +10.11 3 +10.12 1 +10.14 2 +10.15 2 +10.16 4 +10.20 2 +10.22 1 +10.28 1 +10.29 1 +10.30 1 +10.32 4 +10.33 1 +10.34 1 +10.35 3 +10.36 2 +10.37 1 +10.38 1 +10.39 1 +10.40 3 +10.41 1 +10.42 2 +10.43 1 +10.44 1 +10.45 1 +10.47 2 +10.48 2 +10.50 1 +10.51 1 +10.52 4 +10.54 1 +10.56 2 +10.59 2 +10.60 1 +10.62 1 +10.63 3 +10.65 1 +10.66 1 +10.67 2 +10.69 1 +10.70 2 +10.71 1 +10.72 2 +10.74 3 +10.75 1 +10.76 2 +10.77 2 +10.78 3 +10.79 1 +10.80 1 +10.81 1 +10.82 1 +10.83 1 +10.86 4 +10.88 3 +10.90 1 +10.91 2 +10.93 2 +10.94 2 +10.95 1 +10.98 1 +10.99 2 +11.00 1 +11.02 2 +11.04 1 +11.06 2 +11.10 1 +11.11 1 +11.12 2 +11.14 1 +11.15 3 +11.16 3 +11.17 1 +11.19 1 +11.20 4 +11.21 3 +11.23 2 +11.24 2 +11.25 1 +11.26 1 +11.27 1 +11.28 2 +11.29 1 +11.31 2 +11.32 2 +11.34 2 +11.36 1 +11.37 6 +11.38 2 +11.40 3 +11.41 1 +11.43 2 +11.44 1 +11.45 2 +11.46 1 +11.54 1 +11.56 2 +11.58 1 +11.61 1 +11.62 2 +11.63 2 +11.64 1 +11.65 3 +11.66 2 +11.67 1 +11.69 4 +11.70 2 +11.71 2 +11.73 1 +11.74 1 +11.76 1 +11.77 1 +11.78 3 +11.80 1 +11.81 3 +11.82 1 +11.83 1 +11.86 1 +11.87 5 +11.90 2 +11.91 2 +11.93 3 +11.95 1 +11.99 2 +12.03 2 +12.04 1 +12.05 1 +12.07 3 +12.08 2 +12.10 1 +12.11 2 +12.12 1 +12.13 1 +12.14 1 +12.15 1 +12.18 1 +12.22 1 +12.23 1 +12.26 1 +12.27 1 +12.30 2 +12.31 1 +12.34 1 +12.36 2 +12.38 1 +12.39 1 +12.40 1 +12.41 1 +12.43 1 +12.44 1 +12.45 1 +12.47 1 +12.48 2 +12.50 1 +12.51 1 +12.53 2 +12.54 1 +12.55 2 +12.56 1 +12.58 1 +12.59 2 +12.60 1 +12.61 3 +12.63 1 +12.65 2 +12.66 1
+12.67 3 +12.71 1 +12.72 3 +12.73 1 +12.75 1 +12.76 3 +12.77 3 +12.78 2 +12.79 1 +12.81 1 +12.83 1 +12.86 1 +12.87 1 +12.92 5 +12.95 1 +12.98 1 +12.99 2 +13.01 1 +13.03 1 +13.04 4 +13.05 1 +13.06 2 +13.07 2 +13.08 2 +13.09 2 +13.10 2 +13.11 1 +13.12 1 +13.13 1 +13.15 2 +13.16 2 +13.18 1 +13.19 1 +13.20 2 +13.21 1 +13.23 1 +13.25 2 +13.26 1 +13.27 1 +13.28 1 +13.29 1 +13.30 1 +13.32 2 +13.33 1 +13.34 1 +13.35 1 +13.36 1 +13.38 2 +13.40 1 +13.41 1 +13.43 1 +13.44 1 +13.45 1 +13.47 1 +13.49 2 +13.51 1 +13.56 2 +13.58 1 +13.59 1 +13.60 1 +13.61 1 +13.68 1 +13.70 1 +13.71 1 +13.72 1 +13.75 1 +13.77 1 +13.78 2 +13.80 1 +13.81 3 +13.82 1 +13.85 1 +13.86 2 +13.88 1 +13.89 2 +13.90 1 +13.91 2 +13.92 2 +13.93 1 +13.94 2 +13.95 1 +13.96 2 +13.97 3 +14.00 2 +14.01 3 +14.03 1 +14.04 2 +14.07 1 +14.08 1 +14.09 2 +14.10 2 +14.12 2 +14.14 3 +14.16 2 +14.17 1 +14.18 1 +14.19 1 +14.21 2 +14.23 2 +14.24 2 +14.26 1 +14.27 1 +14.28 3 +14.32 3 +14.33 2 +14.35 5 +14.36 1 +14.38 1 +14.39 2 +14.40 3 +14.41 1 +14.42 2 +14.46 1 +14.47 1 +14.48 1 +14.49 2 +14.52 1 +14.54 1 +14.55 3 +14.56 1 +14.57 1 +14.58 1 +14.59 1 +14.63 1 +14.64 1 +14.65 1 +14.66 2 +14.68 2 +14.69 4 +14.71 1 +14.73 1 +14.74 2 +14.75 1 +14.76 1 +14.78 2 +14.80 1 +14.81 1 +14.83 2 +14.85 1 +14.88 2 +14.89 1 +14.91 3 +14.92 1 +14.94 3 +14.96 1 +15.01 1 +15.02 1 +15.07 1 +15.09 1 +15.13 1 +15.14 2 +15.15 1 +15.16 1 +15.17 1 +15.19 2 +15.20 2 +15.22 5 +15.23 2 +15.26 2 +15.27 1 +15.28 2 +15.29 1 +15.30 2 +15.31 1 +15.32 1 +15.35 1 +15.36 1 +15.37 1 +15.38 2 +15.40 1 +15.42 3 +15.43 1 +15.46 1 +15.47 2 +15.50 1 +15.52 1 +15.54 3 +15.55 2 +15.57 1 +15.58 1 +15.60 3 +15.63 1 +15.65 2 +15.67 2 +15.69 2 +15.70 4 +15.75 1 +15.76 1 +15.79 1 +15.80 1 +15.81 2 +15.82 2 +15.83 2 +15.85 1 +15.87 2 +15.88 1 +15.89 2 +15.90 2 +15.96 1 +15.98 2 +15.99 2 +16.00 2 +16.01 1 +16.03 1 +16.05 2 +16.06 3 +16.09 1 +16.10 1 +16.11 1 +16.13 1 +16.14 1 +16.15 3 +16.17 2 +16.18 2 +16.19 1 +16.21 1 +16.22 1 +16.23 1 +16.24 1 +16.25 1 +16.26 3 +16.27 3 +16.29 1 +16.32 1 +16.34 1 +16.35 1 +16.38 1 +16.39 3 +16.40 1 +16.41 1 +16.43 1 +16.48 1 +16.49 1 +16.51 1 +16.53 1 +16.54 1 +16.55 1 +16.57 2 +16.58 1 +16.59 3 +16.60 2 +16.61 1 +16.63 2 +16.66 1 +16.67 1 +16.68 2 +16.70 1 +16.72 1 +16.74 1 +16.75 1 +16.76 1 +16.77 1 +16.79 1 +16.81 1 +16.84 1 +16.85 2 +16.86 2 +16.87 5 +16.89 2 +16.91 1 +16.92 1 +16.93 2 +16.94 1 +16.95 1 +17.00 1 +17.01 1 +17.02 1 +17.03 1 +17.05 2 +17.06 1 +17.07 2 +17.08 1 +17.09 4 +17.12 1 +17.13 2 +17.15 1 +17.17 3 +17.19 2 +17.20 1 +17.21 1 +17.23 3 +17.25 1 +17.26 1 +17.27 3 +17.29 2 +17.31 1 +17.33 1 +17.35 1 +17.38 1 +17.39 1 +17.41 1 +17.42 2 +17.45 1 +17.47 1 +17.48 1 +17.49 1 +17.50 1 +17.52 2 +17.53 1 +17.57 1 +17.58 2 +17.59 1 +17.60 1 +17.61 1 +17.62 1 +17.64 2 +17.65 1 +17.66 1 +17.67 1 +17.68 4 +17.71 1 +17.72 1 +17.73 1 +17.74 1 +17.75 2 +17.77 2 +17.78 1 +17.79 2 +17.83 1 +17.84 1 +17.86 1 +17.87 1 +17.89 1 +17.92 2 +17.93 1 +17.95 1 +17.98 1 +18.00 2 +18.01 1 +18.02 3 +18.03 2 +18.04 1 +18.05 1 +18.06 1 +18.08 4 +18.10 1 +18.11 3 +18.12 4 +18.13 1 +18.14 5 +18.15 2 +18.16 1 +18.17 1 +18.18 1 +18.19 1 +18.20 3 +18.21 3 +18.23 2 +18.24 1 +18.25 1 +18.27 1 +18.28 2 +18.30 2 +18.31 1 +18.32 1 +18.35 2 +18.36 1 +18.37 2 +18.38 2 +18.39 1 +18.42 2 +18.43 1 +18.46 1 +18.47 1 +18.49 2 +18.50 1 +18.51 2 +18.52 1 +18.55 1 +18.56 1 +18.57 2 +18.64 1 +18.65 1 +18.66 1 +18.67 1 +18.68 4 +18.69 3 +18.70 2 +18.73 1 +18.74 2 +18.75 1 +18.76 1 +18.77 1 +18.78 1 +18.79 1 +18.80 2 +18.83 1 +18.85 2 +18.86 2 +18.88 3 +18.89 1 +18.90 1 +18.94 1 +18.96 1 +18.98 1 
+19.00 1 +19.01 2 +19.02 1 +19.03 1 +19.04 2 +19.06 1 +19.07 2 +19.08 2 +19.10 2 +19.11 3 +19.13 2 +19.14 4 +19.15 1 +19.16 1 +19.17 2 +19.18 2 +19.19 2 +19.21 1 +19.24 2 +19.26 2 +19.27 1 +19.28 2 +19.30 4 +19.31 1 +19.32 1 +19.33 1 +19.34 1 +19.37 1 +19.42 2 +19.43 1 +19.44 2 +19.45 1 +19.46 2 +19.47 2 +19.51 1 +19.53 2 +19.54 1 +19.55 3 +19.57 1 +19.58 2 +19.60 1 +19.61 3 +19.62 1 +19.63 2 +19.64 3 +19.65 1 +19.68 3 +19.69 1 +19.71 1 +19.72 1 +19.73 1 +19.77 2 +19.78 3 +19.79 1 +19.81 1 +19.82 1 +19.83 1 +19.84 4 +19.85 1 +19.87 3 +19.88 2 +19.89 1 +19.90 1 +19.91 2 +19.93 3 +19.95 3 +19.96 1 +19.97 1 +19.99 1 +2.03 1 +2.04 2 +2.06 2 +2.07 3 +2.08 1 +2.10 2 +2.11 1 +2.14 1 +2.15 3 +2.17 1 +2.19 1 +2.20 1 +2.21 1 +2.22 1 +2.25 1 +2.26 2 +2.29 2 +2.32 1 +2.33 1 +2.35 2 +2.36 2 +2.37 2 +2.41 1 +2.45 1 +2.46 1 +2.48 3 +2.51 3 +2.52 1 +2.54 1 +2.55 1 +2.56 2 +2.57 1 +2.61 2 +2.62 1 +2.63 2 +2.64 1 +2.66 2 +2.68 3 +2.70 2 +2.72 3 +2.75 2 +2.76 1 +2.77 2 +2.80 2 +2.81 1 +2.82 5 +2.83 2 +2.84 2 +2.86 1 +2.87 2 +2.89 1 +2.90 1 +2.92 1 +2.93 1 +2.95 1 +2.96 2 +2.97 2 +2.98 2 +2.99 1 +20.00 1 +20.02 1 +20.06 1 +20.08 1 +20.09 1 +20.11 1 +20.12 1 +20.13 1 +20.14 2 +20.15 1 +20.18 1 +20.21 1 +20.23 2 +20.24 1 +20.29 1 +20.30 1 +20.31 2 +20.32 1 +20.33 1 +20.34 1 +20.35 3 +20.37 1 +20.39 2 +20.40 1 +20.43 2 +20.44 3 +20.46 1 +20.50 1 +20.51 3 +20.52 1 +20.53 1 +20.54 1 +20.56 1 +20.57 1 +20.60 1 +20.61 3 +20.63 1 +20.65 3 +20.67 1 +20.68 2 +20.69 3 +20.72 1 +20.80 1 +20.81 1 +20.84 1 +20.85 2 +20.88 2 +20.90 2 +20.91 1 +20.93 1 +20.94 3 +20.95 3 +20.97 3 +20.98 1 +20.99 1 +21.01 1 +21.02 1 +21.04 1 +21.06 1 +21.08 1 +21.09 1 +21.10 1 +21.11 2 +21.13 1 +21.14 2 +21.15 4 +21.19 2 +21.21 1 +21.23 1 +21.24 1 +21.25 1 +21.26 2 +21.27 1 +21.28 3 +21.29 4 +21.34 1 +21.35 1 +21.36 2 +21.37 1 +21.39 1 +21.40 1 +21.41 2 +21.42 1 +21.43 3 +21.44 2 +21.45 2 +21.46 1 +21.48 1 +21.49 3 +21.50 2 +21.55 4 +21.56 2 +21.59 4 +21.60 3 +21.61 2 +21.62 2 +21.63 1 +21.65 1 +21.66 2 +21.67 1 +21.68 1 +21.69 2 +21.70 3 +21.71 1 +21.72 1 +21.73 1 +21.75 1 +21.76 1 +21.78 1 +21.79 5 +21.80 1 +21.81 4 +21.83 1 +21.84 3 +21.89 2 +21.92 2 +21.93 3 +21.94 2 +21.96 2 +21.97 1 +21.98 1 +21.99 1 +22.00 1 +22.01 1 +22.02 2 +22.03 3 +22.04 1 +22.05 1 +22.07 2 +22.08 1 +22.11 1 +22.13 2 +22.14 2 +22.15 2 +22.16 3 +22.18 1 +22.19 1 +22.22 1 +22.23 1 +22.24 1 +22.25 1 +22.30 1 +22.32 1 +22.34 3 +22.35 1 +22.38 1 +22.40 1 +22.44 3 +22.45 1 +22.47 1 +22.48 1 +22.49 1 +22.53 1 +22.59 3 +22.60 2 +22.62 2 +22.64 1 +22.66 4 +22.67 1 +22.68 3 +22.69 1 +22.70 1 +22.71 2 +22.72 3 +22.73 3 +22.76 2 +22.77 2 +22.80 1 +22.81 1 +22.85 1 +22.86 1 +22.88 1 +22.93 1 +22.94 2 +22.95 1 +22.99 1 +23.01 2 +23.03 1 +23.05 1 +23.06 1 +23.07 3 +23.08 1 +23.09 3 +23.11 1 +23.14 2 +23.17 1 +23.18 1 +23.19 2 +23.20 2 +23.21 1 +23.22 1 +23.23 1 +23.24 1 +23.28 1 +23.32 1 +23.33 1 +23.34 2 +23.35 2 +23.37 1 +23.38 1 +23.40 1 +23.41 1 +23.42 2 +23.43 1 +23.44 2 +23.45 2 +23.46 1 +23.47 1 +23.48 1 +23.51 1 +23.53 4 +23.55 4 +23.57 2 +23.58 1 +23.59 1 +23.60 2 +23.61 2 +23.63 3 +23.65 1 +23.67 3 +23.68 1 +23.72 1 +23.73 1 +23.74 1 +23.76 2 +23.78 2 +23.79 2 +23.82 2 +23.84 3 +23.85 2 +23.89 1 +23.90 1 +23.91 3 +23.93 2 +23.94 2 +23.95 2 +23.99 2 +24.02 1 +24.03 1 +24.04 4 +24.05 1 +24.06 1 +24.07 1 +24.08 1 +24.10 1 +24.11 1 +24.14 2 +24.15 1 +24.16 1 +24.19 2 +24.20 2 +24.21 1 +24.23 1 +24.24 1 +24.25 1 +24.26 2 +24.30 2 +24.32 1 +24.33 1 +24.34 1 +24.36 3 +24.37 1 +24.38 1 +24.39 1 +24.40 2 +24.44 1 +24.45 1 +24.47 1 +24.48 4 +24.49 2 +24.53 1 +24.54 1 +24.55 1 
+24.56 1 +24.60 1 +24.61 1 +24.62 2 +24.63 1 +24.64 1 +24.65 2 +24.68 1 +24.69 2 +24.70 2 +24.71 1 +24.73 1 +24.76 1 +24.78 1 +24.79 1 +24.80 1 +24.82 1 +24.83 1 +24.84 3 +24.86 1 +24.87 5 +24.89 1 +24.90 2 +24.92 2 +24.94 2 +24.96 1 +24.97 2 +24.98 2 +24.99 1 +25.00 2 +25.04 1 +25.06 1 +25.08 1 +25.10 2 +25.12 1 +25.13 2 +25.14 1 +25.15 1 +25.17 2 +25.18 3 +25.19 2 +25.20 1 +25.21 2 +25.22 1 +25.23 1 +25.24 1 +25.25 2 +25.27 1 +25.28 1 +25.29 2 +25.31 4 +25.35 1 +25.36 2 +25.37 1 +25.38 2 +25.39 1 +25.40 1 +25.41 4 +25.42 1 +25.43 1 +25.47 1 +25.50 1 +25.51 2 +25.52 2 +25.53 1 +25.56 2 +25.57 2 +25.58 3 +25.62 2 +25.63 1 +25.64 1 +25.65 1 +25.66 1 +25.67 2 +25.68 1 +25.69 1 +25.70 1 +25.71 5 +25.72 1 +25.73 1 +25.74 1 +25.76 1 +25.77 2 +25.80 1 +25.83 2 +25.84 2 +25.86 1 +25.88 2 +25.89 2 +25.90 1 +25.91 1 +25.92 1 +25.94 2 +25.95 1 +25.97 1 +25.98 1 +25.99 1 +26.00 1 +26.01 3 +26.02 2 +26.03 1 +26.04 1 +26.05 1 +26.06 2 +26.09 1 +26.10 1 +26.11 2 +26.12 1 +26.13 1 +26.16 1 +26.17 1 +26.19 1 +26.20 2 +26.21 2 +26.22 2 +26.24 1 +26.27 1 +26.31 1 +26.32 3 +26.33 1 +26.34 1 +26.35 1 +26.36 1 +26.37 1 +26.40 2 +26.41 1 +26.42 1 +26.43 1 +26.44 1 +26.46 2 +26.54 1 +26.56 1 +26.57 2 +26.60 1 +26.62 2 +26.64 1 +26.65 1 +26.66 1 +26.67 1 +26.68 1 +26.70 1 +26.73 1 +26.74 2 +26.77 1 +26.78 2 +26.79 1 +26.80 3 +26.81 1 +26.83 1 +26.85 2 +26.86 1 +26.88 1 +26.89 1 +26.91 1 +26.93 1 +26.94 2 +26.95 1 +26.96 2 +26.97 1 +26.98 1 +26.99 2 +27.00 1 +27.01 3 +27.02 1 +27.03 1 +27.04 4 +27.05 1 +27.06 1 +27.07 3 +27.12 4 +27.13 2 +27.14 1 +27.18 1 +27.25 1 +27.26 1 +27.27 1 +27.32 1 +27.33 2 +27.34 2 +27.35 3 +27.36 1 +27.38 1 +27.39 2 +27.43 1 +27.44 1 +27.45 1 +27.47 1 +27.48 1 +27.49 1 +27.51 1 +27.52 2 +27.53 3 +27.55 1 +27.56 1 +27.57 4 +27.58 3 +27.59 2 +27.60 1 +27.62 2 +27.63 3 +27.64 1 +27.67 1 +27.68 2 +27.71 1 +27.72 2 +27.75 1 +27.76 3 +27.79 1 +27.80 3 +27.81 1 +27.82 1 +27.83 1 +27.84 1 +27.86 1 +27.87 1 +27.88 1 +27.89 3 +27.90 1 +27.92 1 +27.95 1 +27.96 2 +27.99 1 +28.00 1 +28.02 1 +28.03 2 +28.04 2 +28.05 2 +28.07 3 +28.08 2 +28.09 3 +28.10 1 +28.11 2 +28.14 2 +28.15 2 +28.17 2 +28.18 1 +28.20 2 +28.22 2 +28.23 1 +28.25 2 +28.28 2 +28.29 1 +28.30 1 +28.31 4 +28.33 3 +28.34 1 +28.35 2 +28.37 1 +28.40 2 +28.41 2 +28.42 1 +28.43 1 +28.44 1 +28.47 2 +28.48 1 +28.49 2 +28.51 1 +28.53 1 +28.54 2 +28.55 2 +28.60 1 +28.61 1 +28.62 2 +28.63 2 +28.64 1 +28.65 1 +28.67 1 +28.68 1 +28.69 2 +28.71 2 +28.72 1 +28.74 2 +28.76 2 +28.78 1 +28.80 1 +28.81 1 +28.84 1 +28.85 1 +28.87 1 +28.90 2 +28.91 2 +28.92 3 +28.93 3 +28.94 1 +28.95 1 +28.96 1 +28.98 1 +29.00 1 +29.01 2 +29.02 1 +29.03 1 +29.04 1 +29.06 2 +29.07 1 +29.09 1 +29.14 1 +29.15 1 +29.19 2 +29.21 1 +29.23 1 +29.26 1 +29.27 3 +29.28 1 +29.30 1 +29.31 1 +29.32 2 +29.34 1 +29.36 2 +29.37 1 +29.38 1 +29.39 1 +29.40 1 +29.41 1 +29.42 1 +29.43 1 +29.48 2 +29.49 4 +29.53 1 +29.54 3 +29.55 1 +29.57 3 +29.58 1 +29.59 3 +29.60 1 +29.61 1 +29.62 2 +29.63 1 +29.64 2 +29.65 2 +29.66 2 +29.67 1 +29.68 3 +29.70 1 +29.72 1 +29.73 1 +29.74 2 +29.75 3 +29.79 1 +29.83 1 +29.84 1 +29.85 1 +29.86 1 +29.87 1 +29.88 1 +29.89 1 +29.90 1 +29.92 2 +29.93 1 +29.95 1 +29.96 1 +29.97 1 +29.99 3 +3.00 2 +3.01 2 +3.04 3 +3.07 3 +3.08 2 +3.09 1 +3.11 2 +3.12 1 +3.13 3 +3.14 1 +3.16 2 +3.18 2 +3.22 2 +3.25 1 +3.27 2 +3.29 2 +3.31 1 +3.34 1 +3.36 3 +3.37 2 +3.39 2 +3.40 1 +3.41 1 +3.43 2 +3.44 1 +3.45 2 +3.46 3 +3.47 2 +3.48 1 +3.49 1 +3.51 4 +3.52 2 +3.53 1 +3.54 2 +3.55 1 +3.58 3 +3.59 2 +3.60 3 +3.61 2 +3.64 1 +3.65 2 +3.68 2 +3.69 2 +3.73 2 +3.74 1 +3.75 2 +3.78 1 +3.79 1 
+3.81 3 +3.83 1 +3.84 3 +3.85 1 +3.86 1 +3.87 4 +3.88 3 +3.90 1 +3.91 2 +3.93 3 +3.94 1 +3.95 2 +3.98 4 +30.00 2 +30.01 5 +30.02 2 +30.05 1 +30.06 2 +30.07 1 +30.08 2 +30.10 1 +30.11 2 +30.15 1 +30.16 1 +30.17 1 +30.18 1 +30.20 2 +30.21 1 +30.23 3 +30.24 1 +30.25 3 +30.26 1 +30.27 3 +30.30 1 +30.32 1 +30.33 2 +30.34 1 +30.35 2 +30.37 3 +30.38 1 +30.40 1 +30.41 1 +30.42 2 +30.43 2 +30.45 2 +30.47 1 +30.49 3 +30.54 1 +30.55 2 +30.57 3 +30.58 2 +30.59 3 +30.60 3 +30.65 2 +30.66 1 +30.67 3 +30.68 2 +30.70 2 +30.71 1 +30.72 1 +30.74 1 +30.76 1 +30.77 3 +30.78 1 +30.79 1 +30.80 3 +30.81 3 +30.82 1 +30.83 1 +30.84 2 +30.85 1 +30.86 1 +30.89 1 +30.91 1 +30.93 1 +30.94 3 +30.96 3 +30.97 1 +30.98 1 +30.99 1 +31.01 1 +31.02 2 +31.03 3 +31.04 1 +31.06 1 +31.07 2 +31.09 1 +31.10 2 +31.11 1 +31.12 1 +31.13 1 +31.16 1 +31.17 2 +31.18 1 +31.19 1 +31.20 1 +31.22 3 +31.23 1 +31.24 2 +31.25 1 +31.27 4 +31.28 1 +31.29 4 +31.30 1 +31.31 1 +31.32 1 +31.34 2 +31.35 3 +31.36 1 +31.37 1 +31.41 2 +31.42 3 +31.43 2 +31.44 2 +31.46 1 +31.48 2 +31.49 3 +31.50 2 +31.54 1 +31.56 3 +31.58 2 +31.59 1 +31.60 3 +31.63 1 +31.66 1 +31.68 2 +31.70 4 +31.71 2 +31.72 1 +31.73 1 +31.74 1 +31.75 2 +31.76 2 +31.78 1 +31.79 2 +31.83 3 +31.85 1 +31.87 2 +31.91 1 +31.92 1 +31.93 2 +31.94 1 +31.96 2 +31.98 1 +31.99 1 +32.01 2 +32.02 2 +32.03 2 +32.05 1 +32.06 2 +32.07 1 +32.08 2 +32.09 2 +32.10 3 +32.11 1 +32.12 2 +32.13 1 +32.15 3 +32.16 1 +32.17 1 +32.18 1 +32.20 1 +32.22 1 +32.25 1 +32.27 1 +32.28 1 +32.30 1 +32.31 1 +32.32 1 +32.33 1 +32.36 1 +32.37 1 +32.40 1 +32.41 3 +32.43 1 +32.44 1 +32.45 3 +32.47 1 +32.48 1 +32.52 1 +32.55 2 +32.56 2 +32.57 1 +32.60 1 +32.61 1 +32.63 1 +32.64 1 +32.65 2 +32.66 1 +32.68 2 +32.72 1 +32.73 2 +32.78 3 +32.79 1 +32.80 3 +32.83 1 +32.84 1 +32.85 2 +32.86 1 +32.88 1 +32.89 1 +32.92 1 +32.94 1 +32.95 2 +32.96 3 +32.97 2 +32.99 1 +33.00 3 +33.01 1 +33.03 5 +33.04 3 +33.05 1 +33.06 1 +33.07 3 +33.11 2 +33.12 2 +33.13 1 +33.14 1 +33.15 1 +33.17 1 +33.18 2 +33.20 2 +33.21 1 +33.22 3 +33.24 1 +33.25 1 +33.27 3 +33.29 3 +33.30 1 +33.31 1 +33.33 1 +33.35 2 +33.36 2 +33.37 3 +33.38 1 +33.39 1 +33.40 1 +33.41 2 +33.42 1 +33.45 2 +33.47 2 +33.49 1 +33.50 2 +33.51 2 +33.53 1 +33.55 1 +33.58 1 +33.59 1 +33.63 2 +33.64 1 +33.65 1 +33.66 2 +33.67 1 +33.68 1 +33.69 3 +33.70 1 +33.71 1 +33.72 3 +33.73 2 +33.75 1 +33.76 3 +33.77 4 +33.82 1 +33.83 1 +33.84 1 +33.87 1 +33.88 2 +33.89 2 +33.90 2 +33.92 2 +33.95 2 +33.99 1 +34.01 2 +34.02 1 +34.04 1 +34.07 1 +34.08 1 +34.10 1 +34.11 1 +34.12 1 +34.13 1 +34.14 1 +34.20 1 +34.22 4 +34.23 2 +34.27 2 +34.29 1 +34.30 1 +34.31 1 +34.32 1 +34.34 1 +34.37 1 +34.38 1 +34.40 2 +34.42 3 +34.44 1 +34.45 2 +34.47 1 +34.49 1 +34.50 1 +34.54 2 +34.55 2 +34.57 1 +34.58 2 +34.59 2 +34.61 2 +34.63 2 +34.64 1 +34.65 2 +34.66 1 +34.67 1 +34.70 2 +34.71 2 +34.72 2 +34.75 1 +34.77 1 +34.78 2 +34.79 1 +34.80 3 +34.82 2 +34.83 1 +34.84 2 +34.85 2 +34.87 4 +34.88 3 +34.90 2 +34.91 1 +34.94 1 +34.95 1 +34.96 1 +34.98 1 +35.01 1 +35.03 1 +35.05 1 +35.06 1 +35.07 1 +35.08 1 +35.09 1 +35.10 2 +35.11 1 +35.13 2 +35.14 4 +35.15 2 +35.16 1 +35.17 1 +35.19 1 +35.20 1 +35.21 3 +35.23 1 +35.25 1 +35.26 1 +35.27 1 +35.28 1 +35.29 1 +35.32 2 +35.33 1 +35.36 1 +35.37 1 +35.38 2 +35.40 3 +35.42 1 +35.43 1 +35.45 3 +35.46 2 +35.48 2 +35.51 1 +35.55 1 +35.56 1 +35.58 1 +35.59 1 +35.63 1 +35.65 1 +35.66 1 +35.68 2 +35.70 1 +35.73 2 +35.75 2 +35.76 1 +35.77 1 +35.78 2 +35.79 1 +35.80 2 +35.82 1 +35.83 2 +35.84 1 +35.85 1 +35.86 2 +35.89 1 +35.90 2 +35.93 1 +35.94 2 +35.95 1 +35.96 1 +35.97 2 +35.98 1 +36.04 2 
+36.05 1 +36.06 1 +36.10 1 +36.11 3 +36.13 1 +36.17 1 +36.18 2 +36.21 1 +36.22 2 +36.23 1 +36.24 1 +36.25 2 +36.28 1 +36.30 1 +36.31 1 +36.32 2 +36.33 4 +36.34 2 +36.36 2 +36.37 1 +36.38 5 +36.41 4 +36.42 1 +36.43 3 +36.45 1 +36.48 1 +36.49 1 +36.50 2 +36.51 2 +36.52 1 +36.53 1 +36.54 2 +36.56 1 +36.57 1 +36.58 2 +36.59 1 +36.60 1 +36.61 2 +36.63 1 +36.65 1 +36.66 2 +36.68 1 +36.69 1 +36.73 2 +36.74 2 +36.75 3 +36.76 2 +36.77 6 +36.80 1 +36.81 1 +36.82 1 +36.84 1 +36.85 2 +36.86 1 +36.87 1 +36.88 1 +36.89 1 +36.90 1 +36.91 1 +36.92 2 +36.93 2 +36.95 3 +36.96 3 +36.97 1 +36.98 1 +36.99 1 +37.00 1 +37.01 1 +37.02 2 +37.03 1 +37.05 2 +37.06 3 +37.07 3 +37.10 1 +37.11 1 +37.12 2 +37.15 2 +37.16 1 +37.17 1 +37.18 1 +37.22 2 +37.24 1 +37.25 2 +37.27 1 +37.28 1 +37.30 1 +37.31 1 +37.32 1 +37.34 4 +37.35 2 +37.36 1 +37.38 1 +37.39 2 +37.41 2 +37.42 1 +37.43 2 +37.44 1 +37.45 2 +37.46 1 +37.48 3 +37.53 1 +37.55 1 +37.56 1 +37.60 5 +37.61 1 +37.63 2 +37.64 1 +37.65 1 +37.66 1 +37.67 1 +37.69 1 +37.70 1 +37.71 2 +37.73 1 +37.74 3 +37.76 1 +37.78 2 +37.79 1 +37.80 2 +37.82 2 +37.84 3 +37.85 1 +37.86 3 +37.88 1 +37.91 1 +37.93 1 +37.94 1 +37.95 3 +37.97 2 +37.98 1 +37.99 1 +38.00 1 +38.01 2 +38.02 1 +38.03 2 +38.04 2 +38.05 1 +38.08 3 +38.09 1 +38.11 1 +38.13 1 +38.14 2 +38.15 1 +38.16 1 +38.17 1 +38.19 1 +38.20 1 +38.22 2 +38.23 3 +38.24 1 +38.25 2 +38.26 1 +38.27 2 +38.29 1 +38.30 1 +38.31 1 +38.33 1 +38.34 2 +38.35 1 +38.36 1 +38.37 1 +38.39 1 +38.41 1 +38.42 1 +38.43 1 +38.44 1 +38.45 1 +38.46 1 +38.47 1 +38.48 1 +38.49 1 +38.50 4 +38.54 3 +38.55 2 +38.56 1 +38.58 1 +38.60 3 +38.61 3 +38.64 1 +38.65 1 +38.66 1 +38.67 2 +38.70 3 +38.71 2 +38.73 2 +38.74 1 +38.75 2 +38.76 1 +38.77 1 +38.81 1 +38.82 2 +38.83 1 +38.84 2 +38.88 3 +38.91 2 +38.93 2 +38.98 4 +39.04 2 +39.05 2 +39.06 1 +39.07 1 +39.09 1 +39.12 5 +39.14 1 +39.15 1 +39.16 2 +39.18 1 +39.19 1 +39.20 2 +39.21 1 +39.22 1 +39.25 1 +39.27 2 +39.31 1 +39.32 2 +39.36 1 +39.37 3 +39.38 1 +39.40 2 +39.42 4 +39.43 1 +39.47 2 +39.48 2 +39.51 1 +39.52 2 +39.55 1 +39.56 2 +39.60 2 +39.61 4 +39.62 1 +39.63 1 +39.67 1 +39.69 2 +39.70 2 +39.71 1 +39.74 2 +39.75 1 +39.76 1 +39.77 2 +39.80 1 +39.81 2 +39.82 2 +39.84 1 +39.85 1 +39.86 2 +39.88 1 +39.89 2 +39.90 3 +39.91 1 +39.92 1 +39.94 2 +39.95 1 +39.96 1 +39.97 1 +39.98 2 +4.03 1 +4.05 1 +4.07 1 +4.08 1 +4.09 1 +4.11 1 +4.14 2 +4.16 2 +4.17 1 +4.18 1 +4.19 1 +4.21 2 +4.22 1 +4.23 1 +4.24 1 +4.25 1 +4.26 1 +4.28 2 +4.29 1 +4.30 2 +4.31 2 +4.32 1 +4.33 2 +4.34 1 +4.35 2 +4.36 2 +4.39 1 +4.40 1 +4.41 1 +4.43 1 +4.44 2 +4.45 1 +4.47 2 +4.49 2 +4.50 1 +4.51 1 +4.52 1 +4.54 1 +4.55 3 +4.56 1 +4.57 1 +4.58 2 +4.59 1 +4.60 5 +4.62 3 +4.64 2 +4.67 1 +4.68 1 +4.70 3 +4.73 1 +4.74 1 +4.77 1 +4.78 2 +4.79 1 +4.82 1 +4.85 1 +4.87 1 +4.88 2 +4.89 3 +4.90 1 +4.91 1 +4.93 1 +4.94 1 +4.95 1 +4.97 2 +40.04 1 +40.06 2 +40.07 2 +40.10 1 +40.11 2 +40.12 3 +40.14 1 +40.18 2 +40.21 1 +40.22 1 +40.28 1 +40.29 1 +40.30 1 +40.33 2 +40.35 1 +40.39 4 +40.40 1 +40.41 1 +40.42 1 +40.43 1 +40.48 1 +40.50 3 +40.51 1 +40.52 1 +40.54 2 +40.56 1 +40.57 2 +40.58 3 +40.59 1 +40.60 2 +40.62 2 +40.63 3 +40.64 1 +40.65 2 +40.66 1 +40.67 1 +40.68 3 +40.70 1 +40.73 1 +40.74 1 +40.76 1 +40.79 1 +40.81 2 +40.82 1 +40.84 1 +40.87 1 +40.88 1 +40.90 1 +40.91 1 +40.92 1 +40.93 1 +40.94 1 +40.96 1 +40.97 1 +40.99 2 +41.00 1 +41.01 2 +41.02 2 +41.04 1 +41.05 2 +41.06 1 +41.08 2 +41.10 1 +41.11 2 +41.13 3 +41.14 1 +41.16 2 +41.19 2 +41.21 1 +41.26 1 +41.27 2 +41.28 1 +41.30 2 +41.32 1 +41.33 1 +41.35 1 +41.37 2 +41.38 1 +41.39 2 +41.40 1 +41.41 1 +41.43 1 
+41.44 2 +41.45 1 +41.46 1 +41.47 1 +41.51 2 +41.52 2 +41.53 1 +41.54 2 +41.55 2 +41.56 2 +41.57 2 +41.58 2 +41.59 1 +41.60 1 +41.61 3 +41.62 1 +41.63 1 +41.67 2 +41.68 3 +41.69 2 +41.70 2 +41.71 2 +41.72 1 +41.75 1 +41.76 1 +41.78 1 +41.80 1 +41.81 2 +41.83 1 +41.86 1 +41.87 3 +41.88 2 +41.89 1 +41.90 2 +41.91 1 +41.92 1 +41.93 1 +41.96 1 +41.97 1 +42.00 1 +42.01 2 +42.02 3 +42.04 1 +42.06 1 +42.09 4 +42.11 1 +42.13 2 +42.14 1 +42.17 1 +42.19 1 +42.20 1 +42.22 2 +42.25 1 +42.26 2 +42.28 2 +42.29 1 +42.30 2 +42.33 1 +42.34 1 +42.35 1 +42.36 2 +42.38 1 +42.39 2 +42.40 2 +42.42 1 +42.43 1 +42.44 1 +42.47 1 +42.48 2 +42.52 2 +42.53 1 +42.54 2 +42.57 1 +42.58 2 +42.60 2 +42.62 2 +42.64 1 +42.66 2 +42.67 1 +42.68 3 +42.70 1 +42.71 1 +42.74 5 +42.76 2 +42.78 2 +42.79 1 +42.81 1 +42.82 1 +42.84 1 +42.85 5 +42.86 1 +42.87 1 +42.88 3 +42.89 1 +42.90 1 +42.91 1 +42.93 4 +42.94 2 +42.98 1 +42.99 1 +43.00 1 +43.01 1 +43.03 1 +43.05 1 +43.08 1 +43.09 1 +43.10 1 +43.11 2 +43.13 2 +43.16 1 +43.18 3 +43.25 2 +43.26 5 +43.29 1 +43.31 2 +43.32 1 +43.33 2 +43.37 3 +43.39 1 +43.41 2 +43.42 1 +43.43 1 +43.44 1 +43.45 1 +43.47 1 +43.48 1 +43.49 1 +43.51 1 +43.53 1 +43.55 3 +43.56 2 +43.58 1 +43.59 1 +43.60 1 +43.61 1 +43.62 1 +43.65 2 +43.66 3 +43.67 1 +43.71 1 +43.72 2 +43.73 1 +43.74 1 +43.75 1 +43.77 1 +43.78 2 +43.79 1 +43.80 4 +43.81 3 +43.82 1 +43.84 2 +43.85 1 +43.87 1 +43.88 1 +43.89 1 +43.92 2 +43.93 1 +43.94 1 +43.95 1 +44.00 1 +44.02 1 +44.04 1 +44.09 2 +44.10 2 +44.17 1 +44.18 1 +44.19 1 +44.20 2 +44.21 4 +44.22 1 +44.24 1 +44.25 1 +44.26 2 +44.30 2 +44.31 1 +44.32 4 +44.33 2 +44.35 1 +44.36 2 +44.37 1 +44.38 1 +44.39 1 +44.41 2 +44.44 1 +44.47 2 +44.49 1 +44.50 1 +44.51 2 +44.52 1 +44.53 2 +44.56 1 +44.57 1 +44.58 3 +44.59 2 +44.60 2 +44.61 2 +44.65 2 +44.66 2 +44.67 2 +44.68 1 +44.69 1 +44.72 1 +44.74 1 +44.75 3 +44.76 1 +44.77 1 +44.78 1 +44.80 1 +44.81 2 +44.83 1 +44.84 1 +44.85 2 +44.88 2 +44.90 1 +44.91 2 +44.92 1 +44.93 1 +44.94 4 +44.95 1 +44.96 5 +44.98 1 +45.00 1 +45.02 1 +45.03 1 +45.05 1 +45.06 1 +45.08 2 +45.12 1 +45.13 1 +45.15 1 +45.19 1 +45.20 1 +45.21 2 +45.23 1 +45.24 1 +45.25 2 +45.29 4 +45.30 1 +45.31 1 +45.32 1 +45.33 2 +45.34 1 +45.35 1 +45.36 3 +45.38 3 +45.40 2 +45.41 1 +45.43 1 +45.45 3 +45.46 1 +45.47 1 +45.48 1 +45.49 1 +45.54 1 +45.58 2 +45.60 1 +45.61 1 +45.63 2 +45.64 2 +45.65 1 +45.67 2 +45.69 4 +45.70 2 +45.74 1 +45.77 2 +45.78 1 +45.81 2 +45.82 1 +45.83 2 +45.84 1 +45.86 4 +45.89 3 +45.90 1 +45.92 1 +45.94 1 +45.96 2 +45.97 1 +45.98 3 +45.99 1 +46.00 1 +46.01 1 +46.04 1 +46.05 1 +46.06 1 +46.09 3 +46.11 1 +46.12 1 +46.13 1 +46.14 3 +46.15 1 +46.19 2 +46.20 1 +46.21 1 +46.23 2 +46.24 1 +46.25 1 +46.26 2 +46.29 2 +46.30 1 +46.32 2 +46.33 1 +46.34 2 +46.35 1 +46.36 3 +46.37 1 +46.39 2 +46.40 1 +46.41 1 +46.42 2 +46.43 1 +46.45 1 +46.47 2 +46.48 2 +46.50 1 +46.53 1 +46.54 1 +46.55 1 +46.57 1 +46.58 1 +46.60 1 +46.61 1 +46.62 1 +46.66 1 +46.70 1 +46.71 1 +46.72 2 +46.73 1 +46.74 1 +46.75 1 +46.77 1 +46.78 2 +46.81 1 +46.84 2 +46.86 1 +46.87 2 +46.90 1 +46.91 1 +46.92 2 +46.93 4 +46.94 1 +46.95 1 +46.96 1 +46.97 1 +46.98 2 +46.99 3 +47.02 1 +47.03 2 +47.06 1 +47.07 1 +47.08 1 +47.09 3 +47.11 1 +47.12 3 +47.13 1 +47.16 3 +47.17 1 +47.19 1 +47.23 3 +47.25 3 +47.26 2 +47.29 1 +47.30 1 +47.31 1 +47.32 1 +47.34 1 +47.35 3 +47.37 1 +47.38 1 +47.41 2 +47.42 1 +47.45 5 +47.46 2 +47.48 1 +47.49 2 +47.51 2 +47.52 1 +47.53 1 +47.54 3 +47.55 1 +47.56 1 +47.57 2 +47.60 1 +47.61 2 +47.62 1 +47.63 1 +47.64 1 +47.65 1 +47.66 1 +47.68 1 +47.69 1 +47.70 1 +47.71 1 +47.72 1 +47.75 1 +47.77 3 
+47.79 1 +47.81 2 +47.82 1 +47.83 1 +47.85 1 +47.86 1 +47.88 1 +47.89 1 +47.91 5 +47.94 1 +47.96 1 +47.97 3 +47.99 2 +48.02 5 +48.05 1 +48.06 1 +48.07 2 +48.08 2 +48.11 1 +48.12 1 +48.13 3 +48.14 1 +48.15 1 +48.16 1 +48.17 3 +48.18 1 +48.19 1 +48.20 4 +48.21 3 +48.22 1 +48.24 1 +48.25 1 +48.26 1 +48.27 2 +48.28 1 +48.29 2 +48.31 1 +48.32 1 +48.33 1 +48.34 2 +48.36 1 +48.37 2 +48.38 1 +48.41 1 +48.42 1 +48.43 2 +48.48 3 +48.49 2 +48.50 3 +48.51 3 +48.54 3 +48.55 1 +48.59 1 +48.60 1 +48.61 2 +48.63 1 +48.64 1 +48.68 3 +48.69 2 +48.70 1 +48.71 2 +48.73 2 +48.75 1 +48.76 2 +48.77 3 +48.78 1 +48.79 2 +48.80 3 +48.81 3 +48.83 1 +48.84 4 +48.85 1 +48.86 1 +48.88 2 +48.90 3 +48.91 1 +48.92 1 +48.93 1 +48.94 2 +48.95 1 +48.96 2 +48.97 1 +48.98 1 +49.00 1 +49.01 2 +49.02 2 +49.03 1 +49.05 1 +49.06 1 +49.07 1 +49.10 2 +49.12 3 +49.13 1 +49.14 1 +49.15 1 +49.17 1 +49.18 2 +49.19 3 +49.21 1 +49.23 1 +49.24 1 +49.26 3 +49.28 1 +49.30 1 +49.31 2 +49.34 1 +49.35 1 +49.38 2 +49.39 1 +49.40 1 +49.43 2 +49.44 1 +49.46 1 +49.49 2 +49.50 1 +49.51 1 +49.54 3 +49.55 1 +49.57 1 +49.60 3 +49.62 2 +49.65 1 +49.67 3 +49.69 2 +49.70 3 +49.71 2 +49.72 2 +49.73 4 +49.75 2 +49.78 1 +49.79 1 +49.80 2 +49.81 2 +49.82 2 +49.83 2 +49.84 2 +49.85 1 +49.87 1 +49.88 4 +49.89 2 +49.90 1 +49.91 3 +49.92 2 +49.93 2 +49.95 1 +49.97 1 +49.99 2 +5.00 2 +5.01 1 +5.02 1 +5.03 1 +5.04 1 +5.05 4 +5.06 1 +5.07 2 +5.09 2 +5.10 3 +5.12 1 +5.13 1 +5.14 2 +5.15 1 +5.16 1 +5.18 1 +5.19 1 +5.20 1 +5.21 2 +5.24 3 +5.26 1 +5.28 1 +5.31 2 +5.33 1 +5.35 1 +5.37 2 +5.39 1 +5.42 2 +5.43 1 +5.45 1 +5.46 4 +5.47 1 +5.49 3 +5.50 3 +5.51 1 +5.52 1 +5.53 1 +5.58 1 +5.63 1 +5.64 2 +5.65 2 +5.68 2 +5.69 1 +5.71 1 +5.72 2 +5.73 1 +5.75 1 +5.76 2 +5.81 1 +5.82 1 +5.83 3 +5.84 1 +5.86 2 +5.88 2 +5.89 1 +5.90 1 +5.93 3 +5.96 1 +5.97 1 +5.98 1 +5.99 1 +50.00 2 +50.01 2 +50.02 1 +50.03 2 +50.04 2 +50.06 1 +50.08 1 +50.09 1 +50.10 1 +50.13 2 +50.14 1 +50.15 1 +50.17 2 +50.21 1 +50.22 3 +50.25 2 +50.26 4 +50.29 1 +50.30 2 +50.31 2 +50.32 1 +50.33 1 +50.34 1 +50.38 1 +50.39 1 +50.40 1 +50.42 1 +50.45 2 +50.50 2 +50.51 1 +50.53 1 +50.54 1 +50.55 1 +50.56 1 +50.57 3 +50.59 2 +50.60 5 +50.61 3 +50.62 1 +50.63 1 +50.66 1 +50.67 1 +50.71 1 +50.72 1 +50.73 3 +50.75 1 +50.76 1 +50.78 2 +50.79 2 +50.80 2 +50.83 1 +50.84 2 +50.85 1 +50.86 2 +50.90 1 +50.96 1 +50.97 2 +50.98 1 +50.99 1 +51.01 1 +51.02 1 +51.04 2 +51.08 4 +51.09 2 +51.10 1 +51.12 1 +51.13 3 +51.14 3 +51.15 1 +51.18 1 +51.19 1 +51.21 1 +51.29 3 +51.32 1 +51.33 2 +51.36 2 +51.39 1 +51.40 2 +51.43 3 +51.45 1 +51.48 2 +51.52 1 +51.53 1 +51.54 3 +51.55 2 +51.56 2 +51.58 3 +51.59 1 +51.62 1 +51.64 1 +51.66 1 +51.68 4 +51.69 1 +51.70 1 +51.71 1 +51.74 2 +51.76 2 +51.78 1 +51.79 1 +51.82 1 +51.83 1 +51.84 2 +51.85 1 +51.86 1 +51.88 2 +51.89 2 +51.90 3 +51.91 3 +51.94 1 +51.95 2 +52.01 1 +52.02 2 +52.05 2 +52.08 3 +52.10 1 +52.12 1 +52.13 1 +52.17 1 +52.19 1 +52.20 1 +52.23 2 +52.24 2 +52.28 2 +52.29 1 +52.32 1 +52.33 1 +52.36 1 +52.38 2 +52.41 3 +52.42 1 +52.43 1 +52.45 1 +52.48 1 +52.49 1 +52.50 1 +52.51 2 +52.52 2 +52.53 1 +52.55 1 +52.56 2 +52.58 1 +52.59 2 +52.60 2 +52.61 1 +52.62 1 +52.63 1 +52.64 1 +52.65 2 +52.66 1 +52.67 1 +52.68 1 +52.69 2 +52.70 3 +52.72 1 +52.74 1 +52.75 1 +52.77 1 +52.78 2 +52.79 1 +52.80 3 +52.81 2 +52.83 2 +52.84 2 +52.85 1 +52.88 1 +52.89 2 +52.90 2 +52.92 1 +52.94 1 +52.95 2 +52.96 1 +52.97 1 +52.98 1 +53.02 1 +53.04 2 +53.06 1 +53.07 2 +53.08 2 +53.09 1 +53.10 2 +53.13 1 +53.14 3 +53.16 2 +53.21 1 +53.22 1 +53.24 1 +53.25 1 +53.27 2 +53.28 2 +53.29 1 +53.30 1 +53.31 3 +53.34 1 +53.35 
2 +53.36 1 +53.37 3 +53.38 2 +53.39 2 +53.40 1 +53.41 3 +53.46 1 +53.47 1 +53.48 2 +53.49 1 +53.50 2 +53.51 1 +53.52 2 +53.53 3 +53.55 1 +53.56 2 +53.57 1 +53.58 1 +53.59 1 +53.60 1 +53.61 4 +53.63 1 +53.64 2 +53.65 4 +53.68 1 +53.69 1 +53.72 2 +53.73 1 +53.74 1 +53.75 1 +53.77 1 +53.79 1 +53.80 1 +53.82 1 +53.83 1 +53.84 2 +53.85 2 +53.86 1 +53.89 1 +53.91 2 +53.92 3 +53.93 1 +53.94 4 +53.96 1 +53.97 1 +53.98 2 +54.00 1 +54.01 1 +54.02 3 +54.03 3 +54.04 4 +54.05 2 +54.07 1 +54.09 1 +54.10 2 +54.11 1 +54.15 1 +54.16 2 +54.18 1 +54.19 1 +54.20 2 +54.21 1 +54.23 1 +54.25 1 +54.26 2 +54.27 1 +54.28 1 +54.29 2 +54.30 1 +54.31 2 +54.32 2 +54.33 1 +54.34 1 +54.35 2 +54.37 1 +54.39 1 +54.41 1 +54.42 1 +54.43 2 +54.45 2 +54.46 1 +54.47 2 +54.48 1 +54.49 1 +54.50 1 +54.51 1 +54.53 2 +54.54 1 +54.55 1 +54.56 1 +54.57 1 +54.58 1 +54.61 1 +54.62 2 +54.64 3 +54.65 1 +54.66 3 +54.67 2 +54.68 2 +54.69 2 +54.70 1 +54.72 3 +54.73 1 +54.74 1 +54.75 5 +54.76 3 +54.79 1 +54.80 2 +54.82 2 +54.85 1 +54.86 1 +54.88 1 +54.89 1 +54.90 1 +54.92 1 +54.93 1 +54.94 1 +54.96 1 +54.98 3 +55.00 2 +55.02 1 +55.03 1 +55.04 3 +55.05 1 +55.06 1 +55.07 1 +55.09 1 +55.10 1 +55.12 3 +55.13 1 +55.14 1 +55.16 1 +55.18 1 +55.19 1 +55.22 1 +55.23 2 +55.24 1 +55.25 1 +55.26 1 +55.30 2 +55.31 1 +55.33 1 +55.36 1 +55.37 2 +55.38 2 +55.39 3 +55.40 2 +55.41 4 +55.42 1 +55.43 1 +55.44 1 +55.45 3 +55.47 2 +55.49 2 +55.50 1 +55.56 1 +55.59 1 +55.60 1 +55.61 1 +55.64 1 +55.66 1 +55.68 3 +55.69 1 +55.70 2 +55.71 1 +55.73 1 +55.75 2 +55.76 3 +55.77 1 +55.78 1 +55.80 1 +55.81 2 +55.82 2 +55.83 2 +55.84 1 +55.86 1 +55.87 1 +55.89 2 +55.90 4 +55.92 1 +55.93 1 +55.94 4 +55.96 1 +55.97 2 +56.00 1 +56.01 1 +56.02 2 +56.04 3 +56.05 1 +56.06 2 +56.07 2 +56.08 2 +56.10 1 +56.11 1 +56.12 1 +56.13 1 +56.15 2 +56.16 1 +56.18 1 +56.20 1 +56.21 1 +56.22 1 +56.23 1 +56.27 1 +56.28 1 +56.32 2 +56.33 3 +56.34 1 +56.36 2 +56.37 1 +56.38 1 +56.41 1 +56.43 1 +56.44 1 +56.45 2 +56.48 1 +56.49 1 +56.52 1 +56.53 2 +56.54 1 +56.55 1 +56.56 2 +56.57 1 +56.58 1 +56.59 3 +56.60 2 +56.63 2 +56.64 2 +56.65 4 +56.70 3 +56.72 1 +56.73 2 +56.74 4 +56.79 1 +56.82 2 +56.83 4 +56.84 1 +56.85 2 +56.86 2 +56.90 2 +56.97 1 +56.98 2 +56.99 1 +57.01 1 +57.03 2 +57.07 1 +57.09 2 +57.10 3 +57.11 1 +57.12 2 +57.15 1 +57.17 2 +57.19 1 +57.20 1 +57.21 3 +57.22 2 +57.23 1 +57.25 2 +57.26 1 +57.28 1 +57.29 2 +57.30 1 +57.32 1 +57.33 4 +57.34 2 +57.36 1 +57.37 1 +57.38 2 +57.39 2 +57.40 1 +57.41 1 +57.42 1 +57.46 1 +57.47 1 +57.48 1 +57.49 2 +57.50 1 +57.53 1 +57.55 2 +57.56 1 +57.58 5 +57.59 1 +57.60 1 +57.61 3 +57.63 1 +57.64 1 +57.65 2 +57.66 2 +57.67 3 +57.68 2 +57.69 3 +57.71 2 +57.73 1 +57.74 1 +57.79 2 +57.80 1 +57.82 2 +57.83 1 +57.86 2 +57.87 2 +57.88 2 +57.89 2 +57.90 1 +57.91 2 +57.93 1 +57.94 1 +57.95 1 +57.97 1 +57.99 3 +58.01 1 +58.02 1 +58.03 3 +58.04 1 +58.05 5 +58.06 2 +58.07 1 +58.08 1 +58.10 2 +58.12 1 +58.14 1 +58.15 2 +58.19 1 +58.20 1 +58.21 1 +58.23 3 +58.24 1 +58.25 3 +58.27 2 +58.28 2 +58.29 1 +58.30 1 +58.31 1 +58.34 1 +58.38 1 +58.39 2 +58.40 2 +58.41 3 +58.42 1 +58.44 1 +58.45 1 +58.46 1 +58.47 1 +58.50 1 +58.51 1 +58.52 1 +58.54 3 +58.56 1 +58.58 1 +58.59 1 +58.60 1 +58.62 1 +58.63 1 +58.64 2 +58.65 1 +58.66 1 +58.67 2 +58.68 2 +58.69 1 +58.70 1 +58.71 1 +58.75 2 +58.78 2 +58.79 4 +58.80 4 +58.81 3 +58.82 4 +58.83 3 +58.84 2 +58.85 3 +58.86 2 +58.87 1 +58.88 1 +58.89 1 +58.90 4 +58.92 2 +58.94 1 +58.96 2 +58.97 2 +58.99 2 +59.00 1 +59.01 1 +59.02 2 +59.03 3 +59.06 1 +59.08 1 +59.10 2 +59.12 2 +59.13 2 +59.16 1 +59.17 2 +59.18 1 +59.19 1 +59.21 1 +59.24 1 +59.26 
1 +59.28 2 +59.29 1 +59.30 1 +59.32 2 +59.33 1 +59.35 2 +59.36 1 +59.37 1 +59.38 1 +59.40 2 +59.41 2 +59.42 2 +59.43 2 +59.44 2 +59.46 2 +59.47 2 +59.48 1 +59.49 5 +59.51 2 +59.52 1 +59.53 2 +59.54 1 +59.55 2 +59.56 3 +59.57 1 +59.58 1 +59.59 1 +59.62 1 +59.63 1 +59.64 2 +59.67 1 +59.68 2 +59.69 2 +59.70 1 +59.71 2 +59.72 1 +59.74 1 +59.76 1 +59.77 3 +59.78 1 +59.79 1 +59.82 1 +59.83 1 +59.89 1 +59.90 1 +59.91 1 +59.92 2 +59.93 2 +59.94 1 +59.95 2 +59.96 1 +59.99 1 +6.00 2 +6.02 2 +6.03 1 +6.04 1 +6.06 2 +6.09 1 +6.10 1 +6.11 1 +6.14 1 +6.15 1 +6.16 1 +6.17 1 +6.18 1 +6.19 2 +6.21 2 +6.22 1 +6.24 2 +6.25 1 +6.26 1 +6.29 2 +6.30 1 +6.35 2 +6.37 1 +6.39 2 +6.40 1 +6.41 1 +6.42 2 +6.43 1 +6.47 1 +6.48 1 +6.50 1 +6.54 2 +6.57 1 +6.59 1 +6.60 1 +6.61 4 +6.62 1 +6.63 1 +6.64 3 +6.66 1 +6.71 1 +6.74 1 +6.76 2 +6.78 2 +6.79 2 +6.81 1 +6.82 2 +6.83 2 +6.84 1 +6.85 2 +6.87 1 +6.88 1 +6.89 2 +6.90 1 +6.92 1 +6.93 1 +6.99 1 +60.00 2 +60.01 3 +60.02 1 +60.04 2 +60.05 3 +60.13 1 +60.16 1 +60.17 2 +60.18 3 +60.20 2 +60.22 1 +60.23 1 +60.24 2 +60.25 2 +60.26 1 +60.29 3 +60.30 1 +60.32 1 +60.35 2 +60.36 1 +60.37 2 +60.38 1 +60.39 1 +60.41 1 +60.42 1 +60.45 1 +60.46 3 +60.48 1 +60.51 1 +60.52 1 +60.53 1 +60.55 1 +60.56 2 +60.57 2 +60.58 1 +60.59 1 +60.60 1 +60.62 1 +60.63 1 +60.64 1 +60.66 2 +60.67 2 +60.70 1 +60.75 2 +60.77 1 +60.78 2 +60.80 2 +60.81 1 +60.82 1 +60.83 1 +60.85 3 +60.86 1 +60.87 3 +60.88 2 +60.89 1 +60.90 2 +60.92 1 +60.93 1 +60.94 2 +60.96 1 +60.97 1 +60.98 1 +60.99 1 +61.00 2 +61.02 1 +61.03 1 +61.04 1 +61.05 2 +61.06 1 +61.08 1 +61.11 4 +61.12 2 +61.13 2 +61.14 1 +61.15 2 +61.16 2 +61.18 1 +61.19 1 +61.20 1 +61.22 1 +61.23 1 +61.24 3 +61.26 1 +61.28 2 +61.29 2 +61.30 3 +61.31 2 +61.33 1 +61.34 3 +61.36 3 +61.38 2 +61.39 1 +61.41 2 +61.42 1 +61.43 1 +61.44 2 +61.46 1 +61.47 1 +61.50 4 +61.52 3 +61.53 1 +61.54 1 +61.55 1 +61.56 4 +61.57 2 +61.58 2 +61.59 2 +61.64 1 +61.67 1 +61.68 2 +61.69 1 +61.70 1 +61.77 2 +61.79 1 +61.81 2 +61.82 2 +61.83 1 +61.84 1 +61.85 2 +61.88 2 +61.89 3 +61.90 2 +61.93 1 +61.94 2 +61.96 2 +61.99 1 +62.00 2 +62.01 3 +62.02 1 +62.03 1 +62.04 2 +62.09 2 +62.11 3 +62.12 1 +62.14 1 +62.16 2 +62.17 1 +62.20 2 +62.21 1 +62.25 2 +62.26 2 +62.28 1 +62.31 2 +62.32 2 +62.33 1 +62.34 1 +62.35 4 +62.36 1 +62.37 1 +62.39 2 +62.41 1 +62.42 2 +62.43 2 +62.44 1 +62.45 1 +62.47 1 +62.48 2 +62.49 1 +62.51 2 +62.52 1 +62.53 1 +62.56 1 +62.58 1 +62.59 4 +62.60 1 +62.62 1 +62.65 2 +62.66 1 +62.67 1 +62.68 1 +62.70 3 +62.71 1 +62.72 1 +62.73 2 +62.77 1 +62.79 1 +62.80 2 +62.81 3 +62.82 1 +62.83 1 +62.84 4 +62.86 1 +62.89 3 +62.91 2 +62.92 1 +62.93 3 +62.95 2 +62.96 1 +62.98 1 +62.99 2 +63.00 1 +63.01 2 +63.02 1 +63.03 1 +63.05 2 +63.06 1 +63.07 1 +63.08 2 +63.10 1 +63.11 1 +63.12 1 +63.14 4 +63.15 1 +63.16 1 +63.17 1 +63.18 1 +63.21 2 +63.22 1 +63.23 2 +63.24 3 +63.25 2 +63.26 1 +63.27 2 +63.28 2 +63.29 2 +63.30 3 +63.31 1 +63.33 1 +63.34 1 +63.35 1 +63.36 3 +63.39 2 +63.40 1 +63.41 1 +63.43 1 +63.46 1 +63.47 1 +63.48 1 +63.49 3 +63.50 1 +63.51 2 +63.52 1 +63.54 2 +63.55 3 +63.57 2 +63.58 1 +63.59 1 +63.60 1 +63.61 1 +63.64 1 +63.65 3 +63.66 2 +63.70 1 +63.72 2 +63.73 1 +63.74 2 +63.75 1 +63.77 2 +63.78 3 +63.80 2 +63.81 2 +63.83 1 +63.85 1 +63.87 1 +63.90 2 +63.92 2 +63.93 3 +63.94 3 +63.99 3 +64.00 2 +64.01 2 +64.04 1 +64.05 2 +64.06 2 +64.09 2 +64.12 2 +64.14 1 +64.15 2 +64.16 1 +64.17 1 +64.18 2 +64.19 2 +64.20 2 +64.21 2 +64.22 3 +64.23 3 +64.24 2 +64.28 1 +64.29 2 +64.30 1 +64.32 1 +64.33 2 +64.34 1 +64.35 2 +64.36 2 +64.38 2 +64.39 2 +64.40 4 +64.41 2 +64.42 2 +64.43 1 +64.44 2 
+64.45 1 +64.46 2 +64.49 2 +64.52 2 +64.55 1 +64.60 1 +64.61 1 +64.62 1 +64.63 1 +64.65 3 +64.68 1 +64.69 1 +64.70 2 +64.71 3 +64.75 3 +64.76 1 +64.77 3 +64.79 1 +64.80 1 +64.81 2 +64.82 1 +64.84 1 +64.88 3 +64.89 1 +64.90 2 +64.91 3 +64.94 2 +64.95 3 +64.98 3 +64.99 2 +65.01 1 +65.02 1 +65.03 1 +65.05 1 +65.06 1 +65.07 1 +65.08 2 +65.13 2 +65.16 1 +65.17 2 +65.18 3 +65.20 1 +65.21 2 +65.22 1 +65.23 1 +65.24 2 +65.25 1 +65.26 1 +65.27 1 +65.28 2 +65.30 1 +65.31 1 +65.32 1 +65.33 2 +65.34 1 +65.35 1 +65.37 5 +65.40 1 +65.42 1 +65.43 1 +65.45 2 +65.46 2 +65.49 2 +65.50 3 +65.53 1 +65.57 1 +65.58 2 +65.61 1 +65.62 1 +65.66 2 +65.69 1 +65.72 1 +65.74 1 +65.75 1 +65.77 3 +65.78 2 +65.79 2 +65.81 2 +65.82 3 +65.85 5 +65.86 1 +65.87 2 +65.88 1 +65.89 1 +65.90 1 +65.91 1 +65.92 1 +65.93 1 +65.98 1 +65.99 1 +66.01 1 +66.02 1 +66.07 2 +66.08 1 +66.09 1 +66.10 1 +66.11 2 +66.13 1 +66.15 2 +66.16 2 +66.19 1 +66.22 1 +66.23 3 +66.24 2 +66.25 1 +66.27 2 +66.30 1 +66.32 2 +66.33 1 +66.34 3 +66.36 3 +66.37 1 +66.39 1 +66.41 1 +66.43 2 +66.45 1 +66.46 1 +66.47 1 +66.49 1 +66.52 1 +66.53 3 +66.54 1 +66.55 1 +66.56 1 +66.57 1 +66.58 2 +66.59 1 +66.60 1 +66.61 1 +66.62 2 +66.64 1 +66.67 1 +66.68 2 +66.71 1 +66.74 2 +66.76 2 +66.78 1 +66.79 1 +66.80 2 +66.81 1 +66.83 1 +66.84 1 +66.85 2 +66.88 1 +66.89 3 +66.91 3 +66.93 2 +66.94 1 +66.95 1 +66.98 2 +66.99 1 +67.00 1 +67.02 2 +67.03 1 +67.04 3 +67.05 1 +67.07 1 +67.08 1 +67.09 1 +67.10 1 +67.14 1 +67.16 1 +67.17 1 +67.18 3 +67.20 1 +67.22 2 +67.23 2 +67.24 1 +67.29 2 +67.30 1 +67.34 2 +67.37 1 +67.40 1 +67.41 1 +67.42 1 +67.46 1 +67.50 3 +67.52 1 +67.54 4 +67.55 1 +67.56 1 +67.59 1 +67.60 1 +67.65 1 +67.66 1 +67.69 1 +67.70 1 +67.71 1 +67.73 1 +67.74 2 +67.78 1 +67.80 2 +67.81 2 +67.82 2 +67.83 2 +67.85 2 +67.87 4 +67.89 1 +67.90 2 +67.91 3 +67.93 2 +67.94 1 +67.99 1 +68.00 3 +68.01 3 +68.02 3 +68.04 2 +68.05 1 +68.06 1 +68.07 1 +68.08 1 +68.09 7 +68.10 2 +68.12 1 +68.14 1 +68.15 2 +68.16 1 +68.17 1 +68.18 1 +68.21 3 +68.24 1 +68.25 3 +68.26 2 +68.28 2 +68.30 3 +68.32 1 +68.33 1 +68.35 2 +68.36 1 +68.39 1 +68.40 3 +68.42 1 +68.43 1 +68.45 2 +68.46 1 +68.48 2 +68.49 2 +68.50 1 +68.54 2 +68.57 1 +68.60 1 +68.64 1 +68.67 1 +68.68 1 +68.72 1 +68.74 1 +68.76 2 +68.79 1 +68.80 4 +68.81 2 +68.82 1 +68.83 1 +68.84 2 +68.85 1 +68.87 1 +68.88 1 +68.89 2 +68.90 2 +68.91 1 +68.92 1 +68.94 1 +68.95 2 +68.97 2 +68.98 1 +68.99 1 +69.00 1 +69.01 2 +69.02 2 +69.03 1 +69.05 1 +69.06 3 +69.11 1 +69.13 2 +69.14 3 +69.16 3 +69.17 1 +69.22 1 +69.24 1 +69.26 3 +69.27 2 +69.28 2 +69.29 3 +69.30 2 +69.32 1 +69.33 6 +69.34 1 +69.36 2 +69.37 1 +69.38 1 +69.41 2 +69.42 3 +69.43 1 +69.44 1 +69.45 2 +69.46 2 +69.47 4 +69.48 2 +69.49 1 +69.52 3 +69.53 1 +69.54 1 +69.55 2 +69.58 3 +69.60 2 +69.62 1 +69.64 1 +69.67 1 +69.68 1 +69.72 2 +69.75 2 +69.76 4 +69.78 1 +69.79 2 +69.80 2 +69.81 1 +69.82 1 +69.84 1 +69.85 4 +69.86 1 +69.87 1 +69.88 1 +69.89 1 +69.90 2 +69.91 1 +69.92 2 +69.93 1 +69.94 1 +69.96 1 +69.97 1 +69.98 3 +7.00 1 +7.02 1 +7.03 3 +7.04 2 +7.06 2 +7.08 2 +7.09 1 +7.10 2 +7.11 1 +7.12 1 +7.13 1 +7.14 2 +7.16 1 +7.17 1 +7.19 2 +7.20 1 +7.21 1 +7.22 1 +7.23 1 +7.24 1 +7.26 1 +7.27 2 +7.28 2 +7.30 1 +7.31 1 +7.33 1 +7.34 2 +7.36 4 +7.37 3 +7.38 2 +7.39 3 +7.42 1 +7.45 1 +7.46 2 +7.49 3 +7.50 1 +7.51 1 +7.52 2 +7.54 1 +7.57 2 +7.59 4 +7.61 2 +7.62 1 +7.66 1 +7.67 4 +7.71 1 +7.72 1 +7.73 1 +7.74 1 +7.76 1 +7.80 2 +7.81 1 +7.82 2 +7.83 2 +7.84 1 +7.87 1 +7.88 2 +7.89 2 +7.90 2 +7.92 3 +7.93 1 +7.94 1 +7.95 2 +7.97 3 +7.98 1 +7.99 2 +70.00 1 +70.01 3 +70.02 3 +70.06 2 +70.08 2 +70.10 2 
+70.12 1 +70.13 1 +70.14 2 +70.15 1 +70.16 1 +70.25 1 +70.27 1 +70.29 1 +70.31 1 +70.32 2 +70.36 1 +70.38 1 +70.41 1 +70.43 1 +70.45 2 +70.48 1 +70.49 1 +70.50 1 +70.51 2 +70.52 3 +70.53 1 +70.54 2 +70.57 3 +70.58 2 +70.59 1 +70.60 1 +70.62 1 +70.63 2 +70.64 2 +70.66 1 +70.67 2 +70.70 2 +70.74 1 +70.75 1 +70.76 1 +70.78 3 +70.79 1 +70.80 2 +70.81 3 +70.82 1 +70.84 1 +70.87 1 +70.88 1 +70.89 2 +70.90 1 +70.91 2 +70.92 2 +70.93 1 +70.96 1 +71.00 1 +71.01 1 +71.02 1 +71.05 1 +71.07 3 +71.08 1 +71.09 1 +71.12 1 +71.14 1 +71.15 3 +71.16 1 +71.17 1 +71.20 1 +71.21 1 +71.22 1 +71.24 1 +71.25 1 +71.27 1 +71.28 1 +71.29 1 +71.30 1 +71.33 1 +71.35 2 +71.37 1 +71.38 1 +71.39 2 +71.42 1 +71.43 1 +71.45 1 +71.48 1 +71.49 1 +71.51 2 +71.53 1 +71.54 1 +71.55 1 +71.56 1 +71.57 3 +71.59 2 +71.61 1 +71.62 2 +71.65 1 +71.66 5 +71.67 1 +71.72 2 +71.73 1 +71.75 1 +71.76 1 +71.77 1 +71.78 1 +71.79 2 +71.80 2 +71.82 2 +71.83 3 +71.84 2 +71.86 1 +71.87 3 +71.88 1 +71.89 1 +71.90 3 +71.91 1 +71.93 1 +71.94 3 +71.95 1 +71.97 1 +71.99 2 +72.00 2 +72.03 1 +72.04 4 +72.07 1 +72.09 1 +72.10 1 +72.11 1 +72.13 2 +72.14 1 +72.15 1 +72.16 3 +72.17 2 +72.19 1 +72.22 3 +72.23 1 +72.24 1 +72.30 2 +72.31 2 +72.32 1 +72.33 3 +72.36 1 +72.37 2 +72.39 3 +72.41 1 +72.43 1 +72.44 2 +72.45 1 +72.46 2 +72.47 3 +72.48 1 +72.50 2 +72.53 1 +72.54 4 +72.55 3 +72.56 1 +72.58 2 +72.59 2 +72.60 1 +72.62 2 +72.63 1 +72.64 1 +72.65 2 +72.67 2 +72.69 1 +72.70 2 +72.73 1 +72.75 1 +72.76 1 +72.79 1 +72.80 1 +72.82 2 +72.83 2 +72.84 1 +72.86 1 +72.87 2 +72.89 1 +72.90 3 +72.91 1 +72.92 1 +72.95 2 +72.96 1 +72.97 1 +72.99 3 +73.01 1 +73.02 1 +73.03 1 +73.06 2 +73.07 3 +73.08 3 +73.09 2 +73.12 1 +73.13 2 +73.16 2 +73.20 2 +73.21 1 +73.23 1 +73.24 1 +73.27 1 +73.28 1 +73.32 1 +73.35 2 +73.37 1 +73.38 1 +73.39 1 +73.41 2 +73.42 1 +73.45 2 +73.46 3 +73.47 2 +73.48 4 +73.51 2 +73.52 1 +73.54 2 +73.55 1 +73.56 1 +73.58 1 +73.59 1 +73.61 1 +73.64 3 +73.65 1 +73.66 1 +73.67 1 +73.68 1 +73.69 1 +73.70 2 +73.72 1 +73.73 1 +73.75 3 +73.76 1 +73.77 1 +73.79 2 +73.80 3 +73.84 1 +73.85 1 +73.86 1 +73.88 1 +73.89 2 +73.90 1 +73.91 1 +73.93 2 +73.94 1 +73.95 1 +73.97 1 +73.98 2 +73.99 1 +74.00 1 +74.01 1 +74.02 1 +74.06 1 +74.07 1 +74.08 2 +74.09 2 +74.10 1 +74.11 1 +74.12 1 +74.13 3 +74.14 2 +74.16 1 +74.17 2 +74.22 1 +74.23 2 +74.24 1 +74.25 3 +74.28 1 +74.29 2 +74.31 2 +74.32 2 +74.33 1 +74.36 2 +74.38 2 +74.39 1 +74.41 1 +74.42 1 +74.44 2 +74.46 1 +74.47 1 +74.48 3 +74.49 1 +74.50 1 +74.51 1 +74.52 2 +74.53 1 +74.55 1 +74.57 1 +74.58 1 +74.59 2 +74.60 4 +74.61 1 +74.63 1 +74.64 2 +74.67 1 +74.68 1 +74.69 1 +74.72 1 +74.73 4 +74.75 2 +74.76 1 +74.77 1 +74.79 1 +74.80 1 +74.81 1 +74.83 1 +74.84 1 +74.85 1 +74.89 1 +74.91 1 +74.92 1 +74.93 1 +74.95 2 +74.97 1 +74.98 1 +75.00 1 +75.02 2 +75.03 1 +75.04 4 +75.06 3 +75.07 1 +75.09 2 +75.10 3 +75.11 3 +75.14 3 +75.15 3 +75.18 2 +75.20 1 +75.21 1 +75.22 1 +75.23 1 +75.25 1 +75.26 1 +75.27 1 +75.29 1 +75.30 1 +75.32 1 +75.33 2 +75.36 1 +75.39 3 +75.40 2 +75.42 1 +75.43 1 +75.44 1 +75.45 2 +75.46 1 +75.48 2 +75.50 1 +75.51 1 +75.52 1 +75.53 2 +75.55 1 +75.56 1 +75.57 2 +75.58 1 +75.59 1 +75.61 1 +75.62 2 +75.64 2 +75.65 1 +75.66 2 +75.68 1 +75.69 1 +75.70 2 +75.71 2 +75.73 2 +75.74 1 +75.75 1 +75.76 2 +75.77 2 +75.78 2 +75.79 1 +75.80 3 +75.81 1 +75.82 1 +75.84 2 +75.85 1 +75.86 1 +75.87 2 +75.88 4 +75.90 1 +75.91 1 +75.92 1 +75.95 2 +75.97 3 +75.99 2 +76.00 2 +76.01 1 +76.02 1 +76.03 1 +76.04 1 +76.06 3 +76.07 1 +76.09 4 +76.11 1 +76.12 1 +76.13 1 +76.14 1 +76.16 2 +76.17 3 +76.18 2 +76.19 3 +76.20 4 +76.21 3 +76.24 1 
+76.25 1 +76.26 1 +76.27 1 +76.28 1 +76.29 2 +76.30 1 +76.31 3 +76.33 1 +76.34 1 +76.35 2 +76.37 1 +76.39 1 +76.40 2 +76.44 1 +76.45 1 +76.46 1 +76.49 1 +76.50 1 +76.51 2 +76.52 1 +76.53 1 +76.54 3 +76.57 2 +76.58 3 +76.60 1 +76.61 1 +76.63 2 +76.64 3 +76.65 3 +76.67 1 +76.70 1 +76.72 2 +76.73 1 +76.75 2 +76.76 1 +76.77 2 +76.79 1 +76.80 3 +76.81 1 +76.83 2 +76.87 2 +76.88 1 +76.89 1 +76.92 1 +76.93 2 +76.95 4 +76.97 1 +76.98 2 +76.99 1 +77.00 1 +77.01 1 +77.02 1 +77.03 1 +77.06 1 +77.07 3 +77.09 1 +77.10 1 +77.12 1 +77.13 2 +77.16 2 +77.17 1 +77.18 3 +77.21 2 +77.22 1 +77.23 1 +77.24 1 +77.25 1 +77.26 1 +77.27 1 +77.29 2 +77.30 1 +77.31 1 +77.32 1 +77.33 1 +77.34 1 +77.35 1 +77.38 1 +77.39 2 +77.40 3 +77.41 1 +77.43 1 +77.44 3 +77.45 1 +77.48 1 +77.51 1 +77.52 2 +77.53 1 +77.55 2 +77.56 2 +77.57 3 +77.59 1 +77.60 1 +77.61 2 +77.62 2 +77.63 2 +77.64 2 +77.66 3 +77.72 1 +77.75 5 +77.76 1 +77.77 2 +77.78 2 +77.79 1 +77.80 1 +77.83 1 +77.84 1 +77.85 1 +77.86 1 +77.87 2 +77.88 2 +77.92 2 +77.93 2 +77.95 2 +77.96 1 +77.97 1 +77.99 1 +78.01 1 +78.05 3 +78.07 2 +78.11 1 +78.13 1 +78.14 1 +78.15 1 +78.16 1 +78.17 1 +78.18 1 +78.22 2 +78.24 2 +78.25 1 +78.28 1 +78.30 1 +78.32 1 +78.33 1 +78.35 2 +78.36 3 +78.39 1 +78.40 1 +78.41 2 +78.43 1 +78.45 1 +78.46 1 +78.48 2 +78.52 1 +78.53 1 +78.55 3 +78.56 3 +78.57 1 +78.59 1 +78.60 1 +78.62 2 +78.65 1 +78.66 1 +78.69 2 +78.73 2 +78.75 3 +78.76 1 +78.78 1 +78.80 1 +78.83 2 +78.84 2 +78.85 3 +78.86 2 +78.90 1 +78.91 1 +78.92 3 +78.93 2 +78.95 1 +78.97 1 +78.98 1 +79.00 1 +79.02 4 +79.03 2 +79.04 3 +79.06 1 +79.07 2 +79.08 4 +79.09 3 +79.10 2 +79.11 1 +79.13 2 +79.14 1 +79.15 2 +79.16 2 +79.17 1 +79.20 2 +79.21 1 +79.22 1 +79.25 1 +79.26 1 +79.28 2 +79.29 1 +79.30 1 +79.32 1 +79.33 2 +79.34 1 +79.37 1 +79.38 1 +79.39 1 +79.40 2 +79.41 2 +79.42 1 +79.44 1 +79.45 2 +79.46 2 +79.48 1 +79.49 1 +79.50 1 +79.51 2 +79.53 1 +79.55 1 +79.56 1 +79.57 2 +79.58 1 +79.59 1 +79.60 3 +79.63 2 +79.64 1 +79.65 3 +79.66 1 +79.67 1 +79.69 1 +79.74 1 +79.75 1 +79.77 2 +79.80 2 +79.81 1 +79.83 1 +79.84 1 +79.85 3 +79.88 1 +79.89 1 +79.90 1 +79.91 1 +79.92 2 +79.93 2 +79.94 1 +79.95 1 +79.96 2 +79.99 1 +8.00 1 +8.01 1 +8.02 1 +8.03 1 +8.04 1 +8.06 3 +8.07 1 +8.08 1 +8.09 1 +8.12 2 +8.13 1 +8.16 1 +8.17 1 +8.22 1 +8.25 1 +8.26 1 +8.27 1 +8.28 2 +8.30 2 +8.31 1 +8.33 1 +8.34 1 +8.35 2 +8.38 2 +8.39 1 +8.40 1 +8.43 1 +8.44 1 +8.46 1 +8.48 4 +8.49 1 +8.52 1 +8.53 1 +8.54 1 +8.55 2 +8.57 3 +8.58 1 +8.59 1 +8.60 1 +8.62 1 +8.63 2 +8.64 3 +8.65 4 +8.66 1 +8.67 1 +8.68 1 +8.69 3 +8.71 1 +8.72 1 +8.73 2 +8.74 1 +8.76 3 +8.77 2 +8.78 2 +8.80 1 +8.84 2 +8.85 2 +8.86 3 +8.88 1 +8.89 2 +8.90 3 +8.91 3 +8.93 1 +8.94 1 +8.95 2 +8.96 1 +8.97 1 +8.98 2 +8.99 1 +80.00 1 +80.01 1 +80.02 1 +80.05 2 +80.08 2 +80.09 5 +80.10 1 +80.11 3 +80.12 1 +80.13 2 +80.14 1 +80.15 1 +80.21 1 +80.24 2 +80.26 1 +80.27 1 +80.30 1 +80.31 2 +80.36 1 +80.38 1 +80.40 2 +80.42 1 +80.43 1 +80.44 1 +80.45 2 +80.47 1 +80.48 2 +80.50 1 +80.51 2 +80.52 1 +80.54 1 +80.55 2 +80.56 1 +80.59 1 +80.61 2 +80.62 1 +80.63 1 +80.64 1 +80.68 1 +80.69 3 +80.71 1 +80.72 2 +80.73 2 +80.76 2 +80.77 2 +80.80 1 +80.81 4 +80.82 3 +80.83 3 +80.84 1 +80.85 1 +80.88 1 +80.90 1 +80.93 1 +80.94 1 +80.95 1 +80.96 3 +80.97 2 +80.98 1 +80.99 2 +81.02 2 +81.03 3 +81.04 3 +81.06 1 +81.10 1 +81.11 1 +81.13 2 +81.14 1 +81.15 1 +81.16 2 +81.18 1 +81.19 1 +81.20 1 +81.21 2 +81.22 1 +81.23 3 +81.24 1 +81.26 1 +81.27 2 +81.28 3 +81.29 1 +81.30 1 +81.35 3 +81.36 1 +81.37 3 +81.38 1 +81.39 1 +81.40 1 +81.41 1 +81.42 2 +81.43 1 +81.44 3 +81.49 1 +81.50 1 
+81.51 3 +81.53 1 +81.54 2 +81.57 1 +81.58 2 +81.59 1 +81.60 1 +81.61 1 +81.66 1 +81.67 1 +81.68 1 +81.69 1 +81.70 2 +81.71 1 +81.72 1 +81.74 2 +81.76 1 +81.78 1 +81.80 1 +81.81 1 +81.85 2 +81.87 2 +81.88 1 +81.89 1 +81.90 1 +81.91 1 +81.93 2 +81.95 2 +81.96 3 +81.98 2 +81.99 2 +82.00 1 +82.02 2 +82.03 1 +82.04 2 +82.05 1 +82.06 2 +82.07 1 +82.08 2 +82.10 2 +82.12 2 +82.16 1 +82.19 1 +82.20 2 +82.21 2 +82.23 1 +82.24 2 +82.25 2 +82.28 2 +82.29 2 +82.31 1 +82.32 1 +82.33 2 +82.34 1 +82.36 2 +82.37 1 +82.39 4 +82.40 1 +82.41 1 +82.43 1 +82.44 1 +82.46 1 +82.47 1 +82.48 3 +82.49 1 +82.50 1 +82.52 1 +82.53 4 +82.54 1 +82.56 1 +82.57 1 +82.59 1 +82.60 1 +82.61 2 +82.62 1 +82.64 2 +82.67 2 +82.68 1 +82.69 1 +82.70 3 +82.71 2 +82.72 1 +82.73 1 +82.74 2 +82.75 1 +82.78 2 +82.79 3 +82.83 3 +82.85 1 +82.87 1 +82.89 1 +82.90 2 +82.91 1 +82.92 1 +82.93 1 +82.94 3 +82.95 1 +82.96 2 +82.98 2 +82.99 1 +83.00 2 +83.01 2 +83.03 3 +83.04 1 +83.05 1 +83.06 2 +83.08 1 +83.09 1 +83.10 3 +83.12 1 +83.13 2 +83.15 2 +83.16 1 +83.18 1 +83.19 1 +83.20 1 +83.21 5 +83.22 1 +83.23 1 +83.24 1 +83.26 3 +83.27 2 +83.29 2 +83.31 1 +83.33 2 +83.36 1 +83.37 2 +83.38 1 +83.39 1 +83.40 1 +83.42 2 +83.43 1 +83.44 2 +83.46 2 +83.47 1 +83.48 3 +83.49 1 +83.50 2 +83.51 1 +83.52 2 +83.53 1 +83.54 1 +83.60 1 +83.61 1 +83.62 1 +83.63 2 +83.64 3 +83.65 1 +83.66 2 +83.67 1 +83.69 1 +83.71 1 +83.73 1 +83.75 1 +83.76 1 +83.78 1 +83.79 2 +83.80 2 +83.81 1 +83.82 3 +83.83 1 +83.85 1 +83.86 2 +83.89 2 +83.92 2 +83.93 1 +83.94 2 +83.95 4 +83.96 1 +84.01 2 +84.02 2 +84.03 2 +84.05 1 +84.07 2 +84.08 2 +84.10 2 +84.12 2 +84.13 1 +84.17 3 +84.18 1 +84.20 2 +84.23 2 +84.25 1 +84.26 2 +84.27 1 +84.28 1 +84.29 2 +84.32 1 +84.35 3 +84.37 3 +84.38 1 +84.40 3 +84.41 2 +84.42 3 +84.43 1 +84.44 2 +84.45 1 +84.46 1 +84.47 1 +84.50 1 +84.55 1 +84.56 1 +84.57 1 +84.58 1 +84.60 3 +84.61 2 +84.62 1 +84.63 2 +84.64 1 +84.65 1 +84.69 2 +84.70 1 +84.71 1 +84.72 1 +84.73 1 +84.75 1 +84.76 1 +84.78 1 +84.79 1 +84.80 2 +84.81 1 +84.83 2 +84.86 1 +84.87 1 +84.88 2 +84.90 3 +84.91 1 +84.92 1 +84.93 2 +84.95 2 +84.96 2 +84.97 3 +84.99 1 +85.00 1 +85.01 3 +85.02 1 +85.08 1 +85.09 2 +85.13 1 +85.16 1 +85.17 1 +85.19 1 +85.21 1 +85.22 2 +85.25 1 +85.26 5 +85.27 1 +85.28 1 +85.29 1 +85.31 1 +85.34 2 +85.36 1 +85.37 1 +85.38 2 +85.39 2 +85.40 3 +85.41 1 +85.43 3 +85.44 2 +85.48 1 +85.49 1 +85.50 2 +85.52 3 +85.54 1 +85.55 3 +85.58 1 +85.59 1 +85.60 1 +85.61 1 +85.62 1 +85.63 1 +85.64 2 +85.65 4 +85.67 1 +85.68 1 +85.70 2 +85.73 1 +85.74 1 +85.75 1 +85.76 1 +85.79 1 +85.80 1 +85.82 2 +85.83 2 +85.85 1 +85.86 1 +85.87 1 +85.88 2 +85.89 1 +85.90 1 +85.91 1 +85.92 1 +85.93 1 +85.94 2 +85.97 1 +85.98 1 +85.99 1 +86.02 2 +86.04 1 +86.10 1 +86.11 1 +86.12 2 +86.13 1 +86.14 1 +86.15 2 +86.18 1 +86.19 3 +86.20 1 +86.21 1 +86.23 2 +86.24 1 +86.25 1 +86.26 2 +86.27 2 +86.28 1 +86.29 3 +86.33 1 +86.34 2 +86.38 2 +86.39 1 +86.40 1 +86.41 2 +86.43 2 +86.44 1 +86.49 2 +86.50 1 +86.51 3 +86.52 2 +86.53 1 +86.54 1 +86.58 1 +86.59 1 +86.60 3 +86.61 2 +86.64 1 +86.65 2 +86.66 2 +86.69 1 +86.70 3 +86.71 2 +86.72 1 +86.73 1 +86.75 3 +86.76 1 +86.78 5 +86.79 2 +86.82 1 +86.84 2 +86.85 6 +86.86 5 +86.87 1 +86.90 1 +86.92 1 +86.93 1 +86.94 1 +86.95 1 +86.97 1 +87.00 1 +87.01 1 +87.02 1 +87.04 1 +87.05 2 +87.06 1 +87.07 1 +87.10 1 +87.11 1 +87.12 1 +87.16 1 +87.17 2 +87.18 1 +87.19 2 +87.21 2 +87.24 1 +87.28 1 +87.29 3 +87.30 1 +87.31 1 +87.32 2 +87.34 1 +87.36 1 +87.37 1 +87.38 2 +87.41 3 +87.42 1 +87.43 2 +87.44 1 +87.47 2 +87.48 1 +87.49 1 +87.50 2 +87.52 5 +87.56 1 +87.57 2 +87.58 2 +87.59 1 
+87.60 2 +87.61 1 +87.62 1 +87.63 1 +87.65 2 +87.66 1 +87.67 3 +87.69 1 +87.70 1 +87.71 2 +87.72 1 +87.73 1 +87.75 1 +87.76 2 +87.78 2 +87.79 1 +87.80 1 +87.83 1 +87.84 2 +87.86 1 +87.89 2 +87.90 1 +87.91 1 +87.92 1 +87.93 1 +87.95 1 +87.96 2 +87.97 3 +87.99 1 +88.01 1 +88.03 1 +88.04 1 +88.05 2 +88.06 2 +88.08 2 +88.09 2 +88.10 3 +88.11 4 +88.12 1 +88.13 1 +88.14 1 +88.16 1 +88.18 1 +88.19 2 +88.20 1 +88.23 3 +88.24 1 +88.25 1 +88.29 2 +88.30 1 +88.31 2 +88.34 1 +88.35 1 +88.38 3 +88.39 2 +88.42 1 +88.45 2 +88.46 3 +88.50 2 +88.51 2 +88.52 1 +88.55 2 +88.56 1 +88.58 1 +88.60 3 +88.61 1 +88.64 1 +88.65 2 +88.67 1 +88.68 1 +88.69 3 +88.73 1 +88.74 2 +88.75 3 +88.78 2 +88.79 2 +88.80 3 +88.81 1 +88.83 3 +88.87 1 +88.88 1 +88.90 1 +88.92 1 +88.93 1 +88.94 2 +88.95 1 +88.96 1 +88.99 2 +89.00 3 +89.02 1 +89.05 1 +89.06 1 +89.08 1 +89.09 1 +89.11 1 +89.12 1 +89.14 5 +89.16 2 +89.18 2 +89.20 1 +89.23 3 +89.24 1 +89.25 2 +89.26 1 +89.28 1 +89.29 1 +89.31 2 +89.32 3 +89.34 1 +89.35 3 +89.38 1 +89.39 1 +89.40 4 +89.41 1 +89.46 1 +89.49 1 +89.50 2 +89.51 1 +89.52 3 +89.53 2 +89.54 3 +89.55 1 +89.58 2 +89.60 4 +89.61 1 +89.62 3 +89.64 1 +89.66 2 +89.67 3 +89.71 1 +89.73 1 +89.75 1 +89.77 3 +89.79 2 +89.80 1 +89.81 1 +89.84 2 +89.86 1 +89.87 3 +89.89 1 +89.92 1 +89.93 1 +89.94 1 +89.95 2 +89.97 2 +89.98 1 +89.99 1 +9.00 1 +9.01 2 +9.02 5 +9.04 1 +9.05 2 +9.06 1 +9.07 2 +9.08 1 +9.09 1 +9.10 2 +9.11 1 +9.12 1 +9.13 1 +9.14 1 +9.15 1 +9.17 1 +9.18 2 +9.20 1 +9.24 2 +9.25 1 +9.29 2 +9.33 1 +9.35 2 +9.37 2 +9.39 1 +9.42 1 +9.43 1 +9.44 1 +9.45 1 +9.46 1 +9.47 3 +9.49 2 +9.50 1 +9.52 1 +9.53 1 +9.55 1 +9.56 1 +9.58 2 +9.59 2 +9.60 1 +9.63 1 +9.64 1 +9.67 1 +9.71 1 +9.73 1 +9.74 1 +9.76 1 +9.77 1 +9.78 1 +9.82 2 +9.84 2 +9.85 1 +9.86 1 +9.87 1 +9.88 2 +9.89 2 +9.91 1 +9.92 3 +9.93 1 +9.94 1 +9.95 1 +9.96 1 +9.97 1 +9.98 2 +9.99 2 +90.01 3 +90.02 3 +90.05 1 +90.08 2 +90.09 2 +90.13 1 +90.15 2 +90.16 2 +90.17 1 +90.19 2 +90.21 1 +90.22 1 +90.24 1 +90.26 2 +90.27 1 +90.28 1 +90.29 1 +90.30 2 +90.31 2 +90.33 1 +90.34 2 +90.36 3 +90.37 2 +90.38 5 +90.39 2 +90.40 2 +90.42 1 +90.43 1 +90.45 2 +90.46 1 +90.47 1 +90.50 1 +90.51 2 +90.53 1 +90.55 2 +90.56 3 +90.58 1 +90.59 1 +90.62 1 +90.64 1 +90.65 1 +90.66 2 +90.68 1 +90.69 2 +90.70 2 +90.71 1 +90.72 1 +90.73 1 +90.74 2 +90.75 1 +90.77 1 +90.78 1 +90.79 1 +90.82 1 +90.83 2 +90.84 1 +90.85 1 +90.87 1 +90.88 2 +90.89 2 +90.90 2 +90.91 2 +90.93 1 +90.95 2 +90.98 1 +91.00 2 +91.03 3 +91.05 1 +91.07 1 +91.08 1 +91.09 3 +91.10 1 +91.11 2 +91.12 2 +91.13 1 +91.15 1 +91.16 1 +91.19 2 +91.22 1 +91.28 1 +91.30 1 +91.31 1 +91.33 2 +91.34 1 +91.35 2 +91.37 3 +91.38 2 +91.40 1 +91.41 1 +91.42 1 +91.43 2 +91.44 1 +91.45 1 +91.46 2 +91.47 1 +91.49 1 +91.51 4 +91.52 1 +91.54 1 +91.56 3 +91.57 1 +91.58 1 +91.59 1 +91.60 1 +91.66 1 +91.67 1 +91.68 1 +91.69 1 +91.70 1 +91.71 1 +91.72 1 +91.75 1 +91.76 1 +91.77 1 +91.80 1 +91.81 1 +91.85 2 +91.87 1 +91.89 1 +91.90 2 +91.93 1 +91.94 1 +91.96 1 +91.98 1 +92.01 2 +92.03 1 +92.05 1 +92.06 1 +92.08 1 +92.13 1 +92.14 1 +92.16 1 +92.17 1 +92.18 1 +92.21 1 +92.22 3 +92.23 1 +92.24 2 +92.27 1 +92.28 1 +92.29 1 +92.30 1 +92.31 1 +92.32 1 +92.34 1 +92.35 1 +92.36 1 +92.38 1 +92.39 1 +92.40 1 +92.41 1 +92.42 1 +92.44 1 +92.47 2 +92.48 1 +92.50 2 +92.52 1 +92.53 1 +92.56 2 +92.57 1 +92.59 2 +92.60 1 +92.61 2 +92.63 2 +92.64 1 +92.67 1 +92.71 2 +92.72 2 +92.73 1 +92.74 3 +92.75 1 +92.78 1 +92.79 2 +92.81 2 +92.82 2 +92.86 1 +92.88 1 +92.89 1 +92.90 1 +92.91 1 +92.92 2 +92.93 2 +92.94 2 +92.95 3 +92.97 3 +92.99 1 +93.00 1 +93.01 3 +93.03 3 +93.05 3 
+93.08 1 +93.09 2 +93.10 1 +93.11 2 +93.13 1 +93.16 2 +93.17 2 +93.18 1 +93.19 1 +93.21 2 +93.22 1 +93.23 1 +93.24 3 +93.26 1 +93.27 1 +93.28 1 +93.29 2 +93.30 2 +93.31 1 +93.32 1 +93.35 1 +93.36 1 +93.38 1 +93.39 1 +93.40 1 +93.41 3 +93.43 2 +93.45 1 +93.46 2 +93.48 1 +93.49 3 +93.50 1 +93.52 2 +93.54 1 +93.55 1 +93.57 2 +93.58 1 +93.60 1 +93.62 2 +93.63 1 +93.64 1 +93.65 2 +93.66 2 +93.68 1 +93.69 1 +93.70 2 +93.71 1 +93.73 2 +93.74 1 +93.77 2 +93.78 1 +93.79 3 +93.81 1 +93.83 2 +93.84 2 +93.85 2 +93.86 1 +93.87 1 +93.89 1 +93.90 1 +93.93 2 +93.95 3 +93.96 1 +93.97 3 +93.98 2 +93.99 2 +94.01 1 +94.02 1 +94.03 4 +94.04 1 +94.05 1 +94.06 1 +94.09 2 +94.11 1 +94.12 1 +94.13 3 +94.14 1 +94.16 2 +94.17 1 +94.23 2 +94.25 1 +94.26 1 +94.30 4 +94.32 1 +94.33 1 +94.34 2 +94.35 2 +94.39 2 +94.42 1 +94.43 1 +94.44 3 +94.45 1 +94.46 2 +94.47 6 +94.48 1 +94.49 1 +94.50 1 +94.51 1 +94.52 1 +94.53 1 +94.56 1 +94.58 2 +94.59 2 +94.60 3 +94.61 2 +94.62 1 +94.63 4 +94.68 1 +94.69 2 +94.71 1 +94.73 1 +94.74 1 +94.75 1 +94.77 2 +94.78 1 +94.80 1 +94.82 2 +94.83 2 +94.85 1 +94.86 2 +94.87 1 +94.88 2 +94.89 1 +94.90 2 +94.91 3 +94.92 1 +94.95 1 +94.97 1 +94.99 1 +95.02 1 +95.05 1 +95.06 2 +95.07 3 +95.08 1 +95.09 1 +95.10 1 +95.11 2 +95.12 4 +95.13 2 +95.14 2 +95.15 1 +95.16 1 +95.20 2 +95.21 1 +95.23 1 +95.24 1 +95.26 1 +95.28 1 +95.32 1 +95.33 1 +95.34 2 +95.37 1 +95.38 1 +95.39 1 +95.40 2 +95.41 1 +95.43 1 +95.45 3 +95.46 4 +95.48 2 +95.51 1 +95.52 1 +95.54 2 +95.56 2 +95.57 1 +95.58 2 +95.59 1 +95.61 3 +95.62 1 +95.63 2 +95.64 1 +95.65 1 +95.67 3 +95.68 1 +95.69 4 +95.70 1 +95.71 2 +95.72 1 +95.74 1 +95.79 1 +95.80 2 +95.81 3 +95.83 2 +95.84 1 +95.87 3 +95.88 1 +95.89 3 +95.90 2 +95.93 1 +95.94 3 +95.99 1 +96.01 1 +96.02 1 +96.04 1 +96.05 2 +96.06 2 +96.07 2 +96.08 1 +96.09 1 +96.10 2 +96.13 2 +96.20 2 +96.21 1 +96.22 1 +96.23 1 +96.24 1 +96.25 1 +96.26 1 +96.28 2 +96.30 2 +96.31 1 +96.32 1 +96.34 2 +96.35 2 +96.36 1 +96.38 1 +96.39 2 +96.42 1 +96.43 1 +96.44 1 +96.45 5 +96.46 1 +96.47 1 +96.48 2 +96.50 1 +96.51 1 +96.52 1 +96.53 2 +96.54 1 +96.55 1 +96.57 1 +96.58 1 +96.59 1 +96.60 4 +96.61 1 +96.62 2 +96.63 1 +96.64 2 +96.66 2 +96.68 2 +96.69 2 +96.71 2 +96.72 1 +96.74 2 +96.76 4 +96.77 1 +96.79 1 +96.80 2 +96.81 2 +96.82 1 +96.83 1 +96.85 2 +96.87 1 +96.89 3 +96.90 2 +96.92 1 +96.95 2 +96.96 1 +96.97 1 +96.98 2 +96.99 1 +97.01 1 +97.03 2 +97.07 1 +97.09 1 +97.10 2 +97.11 3 +97.15 2 +97.16 1 +97.18 2 +97.20 2 +97.21 1 +97.22 2 +97.24 1 +97.25 2 +97.26 1 +97.28 2 +97.29 1 +97.31 1 +97.32 2 +97.34 1 +97.35 1 +97.37 2 +97.38 2 +97.39 1 +97.41 2 +97.42 4 +97.43 1 +97.48 2 +97.49 2 +97.50 2 +97.52 2 +97.53 1 +97.55 1 +97.56 4 +97.57 1 +97.59 1 +97.60 1 +97.62 1 +97.64 1 +97.65 1 +97.66 1 +97.68 1 +97.69 2 +97.70 2 +97.72 1 +97.74 3 +97.75 1 +97.76 1 +97.77 1 +97.78 1 +97.81 2 +97.82 1 +97.84 1 +97.85 1 +97.86 2 +97.87 1 +97.88 1 +97.91 1 +97.94 2 +97.96 1 +97.98 2 +97.99 2 +98.00 1 +98.01 1 +98.03 1 +98.04 1 +98.06 3 +98.07 2 +98.08 1 +98.10 3 +98.11 3 +98.12 1 +98.13 3 +98.15 3 +98.16 1 +98.18 1 +98.20 1 +98.22 1 +98.23 1 +98.24 2 +98.25 3 +98.26 2 +98.28 2 +98.29 2 +98.32 1 +98.33 1 +98.35 2 +98.36 2 +98.38 2 +98.39 1 +98.41 1 +98.42 3 +98.43 1 +98.44 1 +98.46 1 +98.48 2 +98.50 1 +98.51 3 +98.52 1 +98.53 1 +98.54 1 +98.56 1 +98.58 1 +98.60 1 +98.63 1 +98.64 2 +98.67 3 +98.68 2 +98.70 1 +98.72 1 +98.73 1 +98.76 2 +98.77 1 +98.79 1 +98.80 2 +98.82 1 +98.83 1 +98.84 1 +98.86 2 +98.87 2 +98.88 1 +98.89 1 +98.90 5 +98.91 1 +98.93 1 +98.97 1 +98.99 2 +99.00 3 +99.01 1 +99.02 1 +99.03 1 +99.04 1 +99.05 3 +99.06 1 
+99.08 3 +99.10 1 +99.12 1 +99.13 1 +99.14 1 +99.16 1 +99.18 2 +99.19 2 +99.20 1 +99.21 1 +99.22 1 +99.23 1 +99.24 2 +99.27 2 +99.28 3 +99.29 1 +99.31 3 +99.32 1 +99.33 3 +99.34 2 +99.35 2 +99.37 3 +99.38 1 +99.40 1 +99.41 1 +99.42 3 +99.44 1 +99.46 2 +99.48 1 +99.50 4 +99.51 1 +99.52 1 +99.53 2 +99.54 1 +99.55 3 +99.56 2 +99.57 2 +99.58 3 +99.60 1 +99.62 1 +99.63 2 +99.64 2 +99.66 1 +99.67 1 +99.69 1 +99.76 2 +99.77 1 +99.78 1 +99.79 2 +99.80 1 +99.82 2 +99.84 1 +99.85 1 +99.86 3 +99.87 3 +99.88 1 +99.89 1 +99.90 2 +99.92 1 +99.94 2 +99.95 1 +99.96 1 +99.98 1 +99.99 1 +PREHOOK: query: explain vectorization expression +select i, count(i) from over10k group by i +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization expression +select i, count(i) from over10k group by i +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: over10k + Statistics: Num rows: 1 Data size: 10175440 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Select Operator + expressions: i (type: int) + outputColumnNames: i + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [2] + Statistics: Num rows: 1 Data size: 10175440 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: count(i) + Group By Vectorization: + aggregators: VectorUDAFCount(col 2:int) -> bigint + className: VectorGroupByOperator + groupByMode: HASH + keyExpressions: col 2:int + native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false + vectorProcessingMode: HASH + projectedOutputColumnNums: [0] + keys: i (type: int) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 10175440 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkOperator + native: false + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false + Statistics: Num rows: 1 Data size: 10175440 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: bigint) + Execution mode: vectorized + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Vectorization: + enabled: false + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true + enableConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false + Reduce Operator Tree: + Group By Operator + aggregations: count(VALUE._col0) + keys: KEY._col0 (type: int) + mode: mergepartial + outputColumnNames: _col0, _col1 + 
Statistics: Num rows: 1 Data size: 10175440 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 10175440 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select i, count(i) from over10k group by i +PREHOOK: type: QUERY +PREHOOK: Input: default@over10k +#### A masked pattern was here #### +POSTHOOK: query: select i, count(i) from over10k group by i +POSTHOOK: type: QUERY +POSTHOOK: Input: default@over10k +#### A masked pattern was here #### +65536 45 +65537 35 +65538 29 +65539 24 +65540 29 +65541 43 +65542 37 +65543 40 +65544 42 +65545 39 +65546 34 +65547 47 +65548 44 +65549 34 +65550 36 +65551 32 +65552 36 +65553 45 +65554 44 +65555 40 +65556 40 +65557 35 +65558 41 +65559 28 +65560 44 +65561 24 +65562 50 +65563 41 +65564 44 +65565 31 +65566 46 +65567 31 +65568 48 +65569 44 +65570 42 +65571 39 +65572 38 +65573 36 +65574 31 +65575 44 +65576 36 +65577 44 +65578 47 +65579 30 +65580 42 +65581 46 +65582 37 +65583 40 +65584 31 +65585 38 +65586 35 +65587 40 +65588 37 +65589 43 +65590 42 +65591 37 +65592 39 +65593 38 +65594 41 +65595 53 +65596 33 +65597 26 +65598 31 +65599 34 +65600 49 +65601 37 +65602 35 +65603 41 +65604 45 +65605 38 +65606 46 +65607 40 +65608 26 +65609 51 +65610 30 +65611 40 +65612 35 +65613 28 +65614 32 +65615 34 +65616 34 +65617 44 +65618 45 +65619 34 +65620 44 +65621 37 +65622 37 +65623 41 +65624 53 +65625 34 +65626 39 +65627 49 +65628 29 +65629 41 +65630 30 +65631 30 +65632 33 +65633 34 +65634 32 +65635 39 +65636 25 +65637 38 +65638 32 +65639 35 +65640 36 +65641 43 +65642 32 +65643 35 +65644 48 +65645 38 +65646 37 +65647 39 +65648 44 +65649 40 +65650 47 +65651 37 +65652 33 +65653 30 +65654 39 +65655 44 +65656 37 +65657 41 +65658 53 +65659 38 +65660 35 +65661 40 +65662 36 +65663 36 +65664 42 +65665 37 +65666 39 +65667 34 +65668 25 +65669 58 +65670 39 +65671 42 +65672 39 +65673 41 +65674 41 +65675 44 +65676 42 +65677 44 +65678 31 +65679 31 +65680 36 +65681 39 +65682 43 +65683 43 +65684 46 +65685 36 +65686 40 +65687 35 +65688 38 +65689 35 +65690 35 +65691 38 +65692 41 +65693 33 +65694 38 +65695 45 +65696 42 +65697 35 +65698 48 +65699 45 +65700 40 +65701 41 +65702 46 +65703 47 +65704 37 +65705 38 +65706 36 +65707 39 +65708 36 +65709 38 +65710 43 +65711 38 +65712 39 +65713 41 +65714 34 +65715 44 +65716 36 +65717 47 +65718 39 +65719 44 +65720 34 +65721 49 +65722 33 +65723 34 +65724 39 +65725 43 +65726 50 +65727 48 +65728 43 +65729 36 +65730 30 +65731 31 +65732 46 +65733 37 +65734 45 +65735 49 +65736 27 +65737 55 +65738 45 +65739 42 +65740 39 +65741 26 +65742 38 +65743 39 +65744 42 +65745 44 +65746 40 +65747 45 +65748 42 +65749 36 +65750 30 +65751 46 +65752 48 +65753 36 +65754 33 +65755 44 +65756 50 +65757 37 +65758 43 +65759 47 +65760 44 +65761 28 +65762 43 +65763 30 +65764 46 +65765 28 +65766 44 +65767 38 +65768 32 +65769 43 +65770 44 +65771 51 +65772 36 +65773 49 +65774 45 +65775 42 +65776 44 +65777 37 +65778 43 +65779 47 +65780 41 +65781 31 +65782 40 +65783 43 +65784 39 +65785 41 +65786 34 +65787 37 +65788 44 +65789 41 +65790 32 +65791 32 +PREHOOK: query: explain vectorization expression +select i, count(b) from over10k group by i +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization expression +select i, 
count(b) from over10k group by i +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: over10k + Statistics: Num rows: 1 Data size: 10175440 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Select Operator + expressions: i (type: int), b (type: bigint) + outputColumnNames: i, b + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [2, 3] + Statistics: Num rows: 1 Data size: 10175440 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: count(b) + Group By Vectorization: + aggregators: VectorUDAFCount(col 3:bigint) -> bigint + className: VectorGroupByOperator + groupByMode: HASH + keyExpressions: col 2:int + native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false + vectorProcessingMode: HASH + projectedOutputColumnNums: [0] + keys: i (type: int) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 10175440 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkOperator + native: false + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false + Statistics: Num rows: 1 Data size: 10175440 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: bigint) + Execution mode: vectorized + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Vectorization: + enabled: false + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true + enableConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false + Reduce Operator Tree: + Group By Operator + aggregations: count(VALUE._col0) + keys: KEY._col0 (type: int) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 10175440 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 10175440 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select i, count(b) from over10k group by i +PREHOOK: type: QUERY +PREHOOK: Input: default@over10k +#### A masked pattern was here #### +POSTHOOK: query: select i, count(b) from over10k group by i +POSTHOOK: type: 
QUERY +POSTHOOK: Input: default@over10k +#### A masked pattern was here #### +65536 45 +65537 35 +65538 29 +65539 24 +65540 29 +65541 43 +65542 37 +65543 40 +65544 42 +65545 39 +65546 34 +65547 47 +65548 44 +65549 34 +65550 36 +65551 32 +65552 36 +65553 45 +65554 44 +65555 40 +65556 40 +65557 35 +65558 41 +65559 28 +65560 44 +65561 24 +65562 50 +65563 41 +65564 44 +65565 31 +65566 46 +65567 31 +65568 48 +65569 44 +65570 42 +65571 39 +65572 38 +65573 36 +65574 31 +65575 44 +65576 36 +65577 44 +65578 47 +65579 30 +65580 42 +65581 46 +65582 37 +65583 40 +65584 31 +65585 38 +65586 35 +65587 40 +65588 37 +65589 43 +65590 42 +65591 37 +65592 39 +65593 38 +65594 41 +65595 53 +65596 33 +65597 26 +65598 31 +65599 34 +65600 49 +65601 37 +65602 35 +65603 41 +65604 45 +65605 38 +65606 46 +65607 40 +65608 26 +65609 51 +65610 30 +65611 40 +65612 35 +65613 28 +65614 32 +65615 34 +65616 34 +65617 44 +65618 45 +65619 34 +65620 44 +65621 37 +65622 37 +65623 41 +65624 53 +65625 34 +65626 39 +65627 49 +65628 29 +65629 41 +65630 30 +65631 30 +65632 33 +65633 34 +65634 32 +65635 39 +65636 25 +65637 38 +65638 32 +65639 35 +65640 36 +65641 43 +65642 32 +65643 35 +65644 48 +65645 38 +65646 37 +65647 39 +65648 44 +65649 40 +65650 47 +65651 37 +65652 33 +65653 30 +65654 39 +65655 44 +65656 37 +65657 41 +65658 53 +65659 38 +65660 35 +65661 40 +65662 36 +65663 36 +65664 42 +65665 37 +65666 39 +65667 34 +65668 25 +65669 58 +65670 39 +65671 42 +65672 39 +65673 41 +65674 41 +65675 44 +65676 42 +65677 44 +65678 31 +65679 31 +65680 36 +65681 39 +65682 43 +65683 43 +65684 46 +65685 36 +65686 40 +65687 35 +65688 38 +65689 35 +65690 35 +65691 38 +65692 41 +65693 33 +65694 38 +65695 45 +65696 42 +65697 35 +65698 48 +65699 45 +65700 40 +65701 41 +65702 46 +65703 47 +65704 37 +65705 38 +65706 36 +65707 39 +65708 36 +65709 38 +65710 43 +65711 38 +65712 39 +65713 41 +65714 34 +65715 44 +65716 36 +65717 47 +65718 39 +65719 44 +65720 34 +65721 49 +65722 33 +65723 34 +65724 39 +65725 43 +65726 50 +65727 48 +65728 43 +65729 36 +65730 30 +65731 31 +65732 46 +65733 37 +65734 45 +65735 49 +65736 27 +65737 55 +65738 45 +65739 42 +65740 39 +65741 26 +65742 38 +65743 39 +65744 42 +65745 44 +65746 40 +65747 45 +65748 42 +65749 36 +65750 30 +65751 46 +65752 48 +65753 36 +65754 33 +65755 44 +65756 50 +65757 37 +65758 43 +65759 47 +65760 44 +65761 28 +65762 43 +65763 30 +65764 46 +65765 28 +65766 44 +65767 38 +65768 32 +65769 43 +65770 44 +65771 51 +65772 36 +65773 49 +65774 45 +65775 42 +65776 44 +65777 37 +65778 43 +65779 47 +65780 41 +65781 31 +65782 40 +65783 43 +65784 39 +65785 41 +65786 34 +65787 37 +65788 44 +65789 41 +65790 32 +65791 32 +PREHOOK: query: explain vectorization expression +select i, count(*) from over10k group by i +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization expression +select i, count(*) from over10k group by i +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: over10k + Statistics: Num rows: 1 Data size: 10175440 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Select Operator + expressions: i (type: int) + outputColumnNames: i + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [2] + Statistics: Num rows: 1 Data size: 10175440 Basic stats: COMPLETE Column stats: NONE + 
Group By Operator + aggregations: count() + Group By Vectorization: + aggregators: VectorUDAFCountStar(*) -> bigint + className: VectorGroupByOperator + groupByMode: HASH + keyExpressions: col 2:int + native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false + vectorProcessingMode: HASH + projectedOutputColumnNums: [0] + keys: i (type: int) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 10175440 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkOperator + native: false + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false + Statistics: Num rows: 1 Data size: 10175440 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: bigint) + Execution mode: vectorized + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Vectorization: + enabled: false + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true + enableConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false + Reduce Operator Tree: + Group By Operator + aggregations: count(VALUE._col0) + keys: KEY._col0 (type: int) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 10175440 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 10175440 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select i, count(*) from over10k group by i +PREHOOK: type: QUERY +PREHOOK: Input: default@over10k +#### A masked pattern was here #### +POSTHOOK: query: select i, count(*) from over10k group by i +POSTHOOK: type: QUERY +POSTHOOK: Input: default@over10k +#### A masked pattern was here #### +65536 45 +65537 35 +65538 29 +65539 24 +65540 29 +65541 43 +65542 37 +65543 40 +65544 42 +65545 39 +65546 34 +65547 47 +65548 44 +65549 34 +65550 36 +65551 32 +65552 36 +65553 45 +65554 44 +65555 40 +65556 40 +65557 35 +65558 41 +65559 28 +65560 44 +65561 24 +65562 50 +65563 41 +65564 44 +65565 31 +65566 46 +65567 31 +65568 48 +65569 44 +65570 42 +65571 39 +65572 38 +65573 36 +65574 31 +65575 44 +65576 36 +65577 44 +65578 47 +65579 30 +65580 42 +65581 46 +65582 37 +65583 40 +65584 31 +65585 38 +65586 35 +65587 40 +65588 37 +65589 43 +65590 42 +65591 37 +65592 39 +65593 38 +65594 41 +65595 53 +65596 33 +65597 26 +65598 31 +65599 34 +65600 49 +65601 37 +65602 35 +65603 41 +65604 45 
+65605 38
+65606 46
+65607 40
+65608 26
+65609 51
+65610 30
+65611 40
+65612 35
+65613 28
+65614 32
+65615 34
+65616 34
+65617 44
+65618 45
+65619 34
+65620 44
+65621 37
+65622 37
+65623 41
+65624 53
+65625 34
+65626 39
+65627 49
+65628 29
+65629 41
+65630 30
+65631 30
+65632 33
+65633 34
+65634 32
+65635 39
+65636 25
+65637 38
+65638 32
+65639 35
+65640 36
+65641 43
+65642 32
+65643 35
+65644 48
+65645 38
+65646 37
+65647 39
+65648 44
+65649 40
+65650 47
+65651 37
+65652 33
+65653 30
+65654 39
+65655 44
+65656 37
+65657 41
+65658 53
+65659 38
+65660 35
+65661 40
+65662 36
+65663 36
+65664 42
+65665 37
+65666 39
+65667 34
+65668 25
+65669 58
+65670 39
+65671 42
+65672 39
+65673 41
+65674 41
+65675 44
+65676 42
+65677 44
+65678 31
+65679 31
+65680 36
+65681 39
+65682 43
+65683 43
+65684 46
+65685 36
+65686 40
+65687 35
+65688 38
+65689 35
+65690 35
+65691 38
+65692 41
+65693 33
+65694 38
+65695 45
+65696 42
+65697 35
+65698 48
+65699 45
+65700 40
+65701 41
+65702 46
+65703 47
+65704 37
+65705 38
+65706 36
+65707 39
+65708 36
+65709 38
+65710 43
+65711 38
+65712 39
+65713 41
+65714 34
+65715 44
+65716 36
+65717 47
+65718 39
+65719 44
+65720 34
+65721 49
+65722 33
+65723 34
+65724 39
+65725 43
+65726 50
+65727 48
+65728 43
+65729 36
+65730 30
+65731 31
+65732 46
+65733 37
+65734 45
+65735 49
+65736 27
+65737 55
+65738 45
+65739 42
+65740 39
+65741 26
+65742 38
+65743 39
+65744 42
+65745 44
+65746 40
+65747 45
+65748 42
+65749 36
+65750 30
+65751 46
+65752 48
+65753 36
+65754 33
+65755 44
+65756 50
+65757 37
+65758 43
+65759 47
+65760 44
+65761 28
+65762 43
+65763 30
+65764 46
+65765 28
+65766 44
+65767 38
+65768 32
+65769 43
+65770 44
+65771 51
+65772 36
+65773 49
+65774 45
+65775 42
+65776 44
+65777 37
+65778 43
+65779 47
+65780 41
+65781 31
+65782 40
+65783 43
+65784 39
+65785 41
+65786 34
+65787 37
+65788 44
+65789 41
+65790 32
+65791 32
diff --git ql/src/test/results/clientpositive/vector_data_types.q.out ql/src/test/results/clientpositive/vector_data_types.q.out
index 688e6a6..9da877c 100644
--- ql/src/test/results/clientpositive/vector_data_types.q.out
+++ ql/src/test/results/clientpositive/vector_data_types.q.out
@@ -345,6 +345,8 @@ STAGE PLANS:
 className: VectorGroupByOperator
 groupByMode: HASH
 native: false
+nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false
 vectorProcessingMode: HASH
 projectedOutputColumnNums: [0]
 mode: hash
diff --git ql/src/test/results/clientpositive/vector_decimal_aggregate.q.out ql/src/test/results/clientpositive/vector_decimal_aggregate.q.out
index 16c80f0..4faa4d2 100644
--- ql/src/test/results/clientpositive/vector_decimal_aggregate.q.out
+++ ql/src/test/results/clientpositive/vector_decimal_aggregate.q.out
@@ -82,6 +82,8 @@ STAGE PLANS:
 groupByMode: HASH
 keyExpressions: col 3:int
 native: false
+nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Single Key Column IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single COUNT aggregation or Duplicate Reduction IS false
 vectorProcessingMode: HASH
 projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8]
 keys: cint (type: int)
@@ -224,6 +226,8 @@ STAGE PLANS:
 groupByMode: HASH
 keyExpressions: col 3:int
 native: false
+nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Single Key Column IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single COUNT aggregation or Duplicate Reduction IS false
 vectorProcessingMode: HASH
 projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]
 keys: _col0 (type: int)
@@ -399,6 +403,8 @@ STAGE PLANS:
 groupByMode: HASH
 keyExpressions: col 3:int
 native: false
+nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Single Key Column IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single COUNT aggregation or Duplicate Reduction IS false
 vectorProcessingMode: HASH
 projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8]
 keys: cint (type: int)
@@ -560,6 +566,8 @@ STAGE PLANS:
 groupByMode: HASH
 keyExpressions: col 3:int
 native: false
+nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Single Key Column IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single COUNT aggregation or Duplicate Reduction IS false
 vectorProcessingMode: HASH
 projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]
 keys: _col0 (type: int)
diff --git ql/src/test/results/clientpositive/vector_decimal_precision.q.out ql/src/test/results/clientpositive/vector_decimal_precision.q.out
index fd6d9c3..5dbc945 100644
--- ql/src/test/results/clientpositive/vector_decimal_precision.q.out
+++ ql/src/test/results/clientpositive/vector_decimal_precision.q.out
@@ -586,6 +586,8 @@ STAGE PLANS:
 className: VectorGroupByOperator
 groupByMode: HASH
 native: false
+nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false
 vectorProcessingMode: HASH
 projectedOutputColumnNums: [0, 1]
 mode: hash
@@ -1171,6 +1173,8 @@ STAGE PLANS:
 className: VectorGroupByOperator
 groupByMode: HASH
 native: false
+nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false
 vectorProcessingMode: HASH
 projectedOutputColumnNums: [0, 1]
 mode: hash
diff --git ql/src/test/results/clientpositive/vector_distinct_2.q.out ql/src/test/results/clientpositive/vector_distinct_2.q.out
index c3d2d89..ea39eff 100644
--- ql/src/test/results/clientpositive/vector_distinct_2.q.out
+++ ql/src/test/results/clientpositive/vector_distinct_2.q.out
@@ -138,6 +138,8 @@ STAGE PLANS:
 groupByMode: HASH
 keyExpressions: col 0:tinyint, col 8:string
 native: false
+nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single Key Column IS false
 vectorProcessingMode: HASH
 projectedOutputColumnNums: []
 keys: t (type: tinyint), s (type: string)
diff --git ql/src/test/results/clientpositive/vector_empty_where.q.out ql/src/test/results/clientpositive/vector_empty_where.q.out
index 6b2c7fe..46b0f84 100644
--- ql/src/test/results/clientpositive/vector_empty_where.q.out
+++ ql/src/test/results/clientpositive/vector_empty_where.q.out
@@ -43,6 +43,8 @@ STAGE PLANS:
 groupByMode: HASH
 keyExpressions: col 2:int
 native: false
+nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false
 vectorProcessingMode: HASH
 projectedOutputColumnNums: []
 keys: cint (type: int)
@@ -186,6 +188,8 @@ STAGE PLANS:
 groupByMode: HASH
 keyExpressions: col 2:int
 native: false
+nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false
 vectorProcessingMode: HASH
 projectedOutputColumnNums: []
 keys: cint (type: int)
@@ -337,6 +341,8 @@ STAGE PLANS:
 groupByMode: HASH
 keyExpressions: col 2:int
 native: false
+nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false
 vectorProcessingMode: HASH
 projectedOutputColumnNums: []
 keys: cint (type: int)
@@ -488,6 +494,8 @@ STAGE PLANS:
 groupByMode: HASH
 keyExpressions: col 2:int
 native: false
+nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false
 vectorProcessingMode: HASH
 projectedOutputColumnNums: []
 keys: cint (type: int)
diff --git ql/src/test/results/clientpositive/vector_groupby_3.q.out ql/src/test/results/clientpositive/vector_groupby_3.q.out
index dfac04d..34c24da 100644
--- ql/src/test/results/clientpositive/vector_groupby_3.q.out
+++ ql/src/test/results/clientpositive/vector_groupby_3.q.out
@@ -140,6 +140,8 @@ STAGE PLANS:
 groupByMode: HASH
 keyExpressions: col 0:tinyint, col 8:string
 native: false
+nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false
 vectorProcessingMode: HASH
 projectedOutputColumnNums: [0]
 keys: t (type: tinyint), s (type: string)
diff --git ql/src/test/results/clientpositive/vector_groupby_mapjoin.q.out ql/src/test/results/clientpositive/vector_groupby_mapjoin.q.out
index 01c5096..cfcc78b 100644
--- ql/src/test/results/clientpositive/vector_groupby_mapjoin.q.out
+++ ql/src/test/results/clientpositive/vector_groupby_mapjoin.q.out
@@ -53,6 +53,8 @@ STAGE PLANS:
 className: VectorGroupByOperator
 groupByMode: HASH
 native: false
+nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false
 vectorProcessingMode: HASH
 projectedOutputColumnNums: [0, 1]
 mode: hash
@@ -336,6 +338,8 @@ STAGE PLANS:
 groupByMode: HASH
 keyExpressions: col 0:string
 native: false
+nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false
 vectorProcessingMode: HASH
 projectedOutputColumnNums: []
 keys: key (type: string)
diff --git ql/src/test/results/clientpositive/vector_groupby_reduce.q.out ql/src/test/results/clientpositive/vector_groupby_reduce.q.out
index 8a6135e..5c88d87 100644
--- ql/src/test/results/clientpositive/vector_groupby_reduce.q.out
+++ ql/src/test/results/clientpositive/vector_groupby_reduce.q.out
@@ -266,6 +266,8 @@ STAGE PLANS:
 groupByMode: HASH
 keyExpressions: col 9:int
 native: false
+nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false
 vectorProcessingMode: HASH
 projectedOutputColumnNums: []
 keys: ss_ticket_number (type: int)
@@ -458,6 +460,8 @@ STAGE PLANS:
 groupByMode: HASH
 keyExpressions: col 9:int
 native: false
+nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false
 vectorProcessingMode: HASH
 projectedOutputColumnNums: []
 keys: ss_ticket_number (type: int)
@@ -734,6 +738,8 @@ STAGE PLANS:
 groupByMode: HASH
 keyExpressions: col 2:int
 native: false
+nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Single Key Column IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single COUNT aggregation or Duplicate Reduction IS false
 vectorProcessingMode: HASH
 projectedOutputColumnNums: [0, 1, 2]
 keys: ss_item_sk (type: int)
@@ -932,6 +938,8 @@ STAGE PLANS:
 groupByMode: HASH
 keyExpressions: col 9:int, col 2:int
 native: false
+nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false
 vectorProcessingMode: HASH
 projectedOutputColumnNums: [0, 1, 2]
 keys: ss_ticket_number (type: int), ss_item_sk (type: int)
diff --git ql/src/test/results/clientpositive/vector_grouping_sets.q.out ql/src/test/results/clientpositive/vector_grouping_sets.q.out
index e89b6bc..a1b3201 100644
--- ql/src/test/results/clientpositive/vector_grouping_sets.q.out
+++ ql/src/test/results/clientpositive/vector_grouping_sets.q.out
@@ -164,6 +164,8 @@ STAGE PLANS:
 groupByMode: HASH
 keyExpressions: col 1:string, ConstantVectorExpression(val 0) -> 30:bigint
 native: false
+nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true
+nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single Key Column IS false, No Grouping Sets IS false
 vectorProcessingMode: HASH
 projectedOutputColumnNums: []
 keys: s_store_id (type: string), 0L (type: bigint)
@@ -275,6 +277,8 @@ STAGE PLANS:
 groupByMode: HASH
 keyExpressions: col 1:string, ConstantVectorExpression(val 0) -> 30:bigint
 native: false
+nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true
+nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single Key Column IS false, No Grouping Sets IS false
 vectorProcessingMode: HASH
 projectedOutputColumnNums: []
 keys: _col0 (type: string), 0L (type: bigint)
diff --git ql/src/test/results/clientpositive/vector_include_no_sel.q.out ql/src/test/results/clientpositive/vector_include_no_sel.q.out
index 848823f..921cba0 100644
--- ql/src/test/results/clientpositive/vector_include_no_sel.q.out
+++ ql/src/test/results/clientpositive/vector_include_no_sel.q.out
@@ -241,6 +241,8 @@ STAGE PLANS:
 className: VectorGroupByOperator
 groupByMode: HASH
 native: false
+nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false
 vectorProcessingMode: HASH
 projectedOutputColumnNums: [0]
 mode: hash
diff --git ql/src/test/results/clientpositive/vector_orderby_5.q.out ql/src/test/results/clientpositive/vector_orderby_5.q.out
index 793d99e..f8698bb 100644
--- ql/src/test/results/clientpositive/vector_orderby_5.q.out
+++ ql/src/test/results/clientpositive/vector_orderby_5.q.out
@@ -141,6 +141,8 @@ STAGE PLANS:
 groupByMode: HASH
 keyExpressions: col 7:boolean
 native: false
+nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Single Key Column IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single COUNT aggregation or Duplicate Reduction IS false
 vectorProcessingMode: HASH
 projectedOutputColumnNums: [0]
 keys: bo (type: boolean)
diff --git ql/src/test/results/clientpositive/vector_outer_join1.q.out ql/src/test/results/clientpositive/vector_outer_join1.q.out
index a6d87c2..27b022c 100644
--- ql/src/test/results/clientpositive/vector_outer_join1.q.out
+++ ql/src/test/results/clientpositive/vector_outer_join1.q.out
@@ -703,6 +703,8 @@ STAGE PLANS:
 className: VectorGroupByOperator
 groupByMode: HASH
 native: false
+nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false
 vectorProcessingMode: HASH
 projectedOutputColumnNums: [0, 1]
 mode: hash
diff --git ql/src/test/results/clientpositive/vector_outer_join2.q.out ql/src/test/results/clientpositive/vector_outer_join2.q.out
index 77a5bc7..f0006e1 100644
--- ql/src/test/results/clientpositive/vector_outer_join2.q.out
+++ ql/src/test/results/clientpositive/vector_outer_join2.q.out
@@ -343,6 +343,8 @@ STAGE PLANS:
 className: VectorGroupByOperator
 groupByMode: HASH
 native: false
+nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Group By Mode HASH IS true, No Grouping Sets IS true
+nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single Key Column IS false, Single
COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1] mode: hash diff --git ql/src/test/results/clientpositive/vector_outer_join3.q.out ql/src/test/results/clientpositive/vector_outer_join3.q.out index 20f8f4b..f6563b4 100644 --- ql/src/test/results/clientpositive/vector_outer_join3.q.out +++ ql/src/test/results/clientpositive/vector_outer_join3.q.out @@ -244,7 +244,7 @@ left outer join small_alltypesorc_a hd on hd.cstring1 = c.cstring1 ) t1 POSTHOOK: type: QUERY -{"PLAN VECTORIZATION":{"enabled":true,"enabledConditionsMet":["hive.vectorized.execution.enabled IS true"]},"STAGE DEPENDENCIES":{"Stage-8":{"ROOT STAGE":"TRUE"},"Stage-3":{"DEPENDENT STAGES":"Stage-8"},"Stage-0":{"DEPENDENT STAGES":"Stage-3"}},"STAGE PLANS":{"Stage-8":{"Map Reduce Local Work":{"Alias -> Map Local Tables:":{"$hdt$_1:cd":{"Fetch Operator":{"limit:":"-1"}},"$hdt$_2:hd":{"Fetch Operator":{"limit:":"-1"}}},"Alias -> Map Local Operator Tree:":{"$hdt$_1:cd":{"TableScan":{"alias:":"cd","columns:":["cint"],"database:":"default","Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_a","isTempTable:":"false","OperatorId:":"TS_2","children":{"Select Operator":{"expressions:":"cint (type: int)","columnExprMap:":{"_col0":"cint"},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_3","children":{"HashTable Sink Operator":{"keys:":{"0":"_col0 (type: int)","1":"_col0 (type: int)"},"OperatorId:":"HASHTABLESINK_26"}}}}}},"$hdt$_2:hd":{"TableScan":{"alias:":"hd","columns:":["cstring1"],"database:":"default","Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_a","isTempTable:":"false","OperatorId:":"TS_4","children":{"Select Operator":{"expressions:":"cstring1 (type: string)","columnExprMap:":{"_col0":"cstring1"},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_5","children":{"HashTable Sink Operator":{"keys:":{"0":"_col1 (type: string)","1":"_col0 (type: string)"},"OperatorId:":"HASHTABLESINK_24"}}}}}}}}},"Stage-3":{"Map Reduce":{"Map Operator Tree:":[{"TableScan":{"alias:":"c","columns:":["cint","cstring1"],"database:":"default","Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_a","TableScan Vectorization:":{"native:":"true","vectorizationSchemaColumns:":"[0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct]"},"isTempTable:":"false","OperatorId:":"TS_0","children":{"Select Operator":{"expressions:":"cint (type: int), cstring1 (type: string)","columnExprMap:":{"_col0":"cint","_col1":"cstring1"},"outputColumnNames:":["_col0","_col1"],"Select Vectorization:":{"className:":"VectorSelectOperator","native:":"true","projectedOutputColumnNums:":"[2, 6]"},"Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_28","children":{"Map Join Operator":{"columnExprMap:":{"_col1":"0:_col1"},"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col0 (type: int)","1":"_col0 (type: int)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 
2:int"],"bigTableValueExpressions:":["col 6:string"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Outer Join has keys IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"outputColumnNames:":["_col1"],"Statistics:":"Num rows: 22 Data size: 4840 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_29","children":{"Map Join Operator":{"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col1 (type: string)","1":"_col0 (type: string)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 0:string"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Outer Join has keys IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Statistics:":"Num rows: 24 Data size: 5324 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_30","children":{"Group By Operator":{"aggregations:":["count()"],"Group By Vectorization:":{"aggregators:":["VectorUDAFCountStar(*) -> bigint"],"className:":"VectorGroupByOperator","groupByMode:":"HASH","native:":"false","vectorProcessingMode:":"HASH","projectedOutputColumnNums:":"[0]"},"mode:":"hash","outputColumnNames:":["_col0"],"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"GBY_31","children":{"Reduce Output Operator":{"columnExprMap:":{"VALUE._col0":"_col0"},"sort order:":"","Reduce Sink Vectorization:":{"className:":"VectorReduceSinkOperator","native:":"false","nativeConditionsMet:":["hive.vectorized.execution.reducesink.new.enabled IS true","No PTF TopN IS true","No DISTINCT columns IS true","BinarySortableSerDe for keys IS true","LazyBinarySerDe for values IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","value expressions:":"_col0 (type: bigint)","OperatorId:":"RS_32"}}}}}}}}}}}}],"Execution mode:":"vectorized","Map Vectorization:":{"enabled:":"true","enabledConditionsMet:":["hive.vectorized.use.vectorized.input.format IS true"],"inputFormatFeatureSupport:":"[]","featureSupportInUse:":"[]","inputFileFormats:":["org.apache.hadoop.hive.ql.io.orc.OrcInputFormat"],"allNative:":"false","usesVectorUDFAdaptor:":"false","vectorized:":"true","rowBatchContext:":{"dataColumnCount:":"12","includeColumns:":"[2, 6]","dataColumns:":["ctinyint:tinyint","csmallint:smallint","cint:int","cbigint:bigint","cfloat:float","cdouble:double","cstring1:string","cstring2:string","ctimestamp1:timestamp","ctimestamp2:timestamp","cboolean1:boolean","cboolean2:boolean"],"partitionColumnCount:":"0","scratchColumnTypeNames:":"[]"}},"Local Work:":{"Map Reduce Local Work":{}},"Reduce Vectorization:":{"enabled:":"false","enableConditionsMet:":["hive.vectorized.execution.reduce.enabled IS true"],"enableConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Reduce Operator Tree:":{"Group By 
Operator":{"aggregations:":["count(VALUE._col0)"],"mode:":"mergepartial","outputColumnNames:":["_col0"],"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"GBY_15","children":{"File Output Operator":{"compressed:":"false","Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","table:":{"input format:":"org.apache.hadoop.mapred.SequenceFileInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat","serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"},"OperatorId:":"FS_17"}}}}}},"Stage-0":{"Fetch Operator":{"limit:":"-1","Processor Tree:":{"ListSink":{"OperatorId:":"LIST_SINK_33"}}}}}} +{"PLAN VECTORIZATION":{"enabled":true,"enabledConditionsMet":["hive.vectorized.execution.enabled IS true"]},"STAGE DEPENDENCIES":{"Stage-8":{"ROOT STAGE":"TRUE"},"Stage-3":{"DEPENDENT STAGES":"Stage-8"},"Stage-0":{"DEPENDENT STAGES":"Stage-3"}},"STAGE PLANS":{"Stage-8":{"Map Reduce Local Work":{"Alias -> Map Local Tables:":{"$hdt$_1:cd":{"Fetch Operator":{"limit:":"-1"}},"$hdt$_2:hd":{"Fetch Operator":{"limit:":"-1"}}},"Alias -> Map Local Operator Tree:":{"$hdt$_1:cd":{"TableScan":{"alias:":"cd","columns:":["cint"],"database:":"default","Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_a","isTempTable:":"false","OperatorId:":"TS_2","children":{"Select Operator":{"expressions:":"cint (type: int)","columnExprMap:":{"_col0":"cint"},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_3","children":{"HashTable Sink Operator":{"keys:":{"0":"_col0 (type: int)","1":"_col0 (type: int)"},"OperatorId:":"HASHTABLESINK_26"}}}}}},"$hdt$_2:hd":{"TableScan":{"alias:":"hd","columns:":["cstring1"],"database:":"default","Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_a","isTempTable:":"false","OperatorId:":"TS_4","children":{"Select Operator":{"expressions:":"cstring1 (type: string)","columnExprMap:":{"_col0":"cstring1"},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_5","children":{"HashTable Sink Operator":{"keys:":{"0":"_col1 (type: string)","1":"_col0 (type: string)"},"OperatorId:":"HASHTABLESINK_24"}}}}}}}}},"Stage-3":{"Map Reduce":{"Map Operator Tree:":[{"TableScan":{"alias:":"c","columns:":["cint","cstring1"],"database:":"default","Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_a","TableScan Vectorization:":{"native:":"true","vectorizationSchemaColumns:":"[0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct]"},"isTempTable:":"false","OperatorId:":"TS_0","children":{"Select Operator":{"expressions:":"cint (type: int), cstring1 (type: string)","columnExprMap:":{"_col0":"cint","_col1":"cstring1"},"outputColumnNames:":["_col0","_col1"],"Select Vectorization:":{"className:":"VectorSelectOperator","native:":"true","projectedOutputColumnNums:":"[2, 6]"},"Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_28","children":{"Map Join Operator":{"columnExprMap:":{"_col1":"0:_col1"},"condition map:":[{"":"Left 
Outer Join 0 to 1"}],"keys:":{"0":"_col0 (type: int)","1":"_col0 (type: int)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 2:int"],"bigTableValueExpressions:":["col 6:string"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Outer Join has keys IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"outputColumnNames:":["_col1"],"Statistics:":"Num rows: 22 Data size: 4840 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_29","children":{"Map Join Operator":{"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col1 (type: string)","1":"_col0 (type: string)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 0:string"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Outer Join has keys IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Statistics:":"Num rows: 24 Data size: 5324 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_30","children":{"Group By Operator":{"aggregations:":["count()"],"Group By Vectorization:":{"aggregators:":["VectorUDAFCountStar(*) -> bigint"],"className:":"VectorGroupByOperator","groupByMode:":"HASH","native:":"false","nativeConditionsMet:":["hive.vectorized.execution.groupby.native.enabled IS true","Group By Mode HASH IS true","No Grouping Sets IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false","Single Key Column IS false","Single COUNT aggregation or Duplicate Reduction IS false"],"vectorProcessingMode:":"HASH","projectedOutputColumnNums:":"[0]"},"mode:":"hash","outputColumnNames:":["_col0"],"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"GBY_31","children":{"Reduce Output Operator":{"columnExprMap:":{"VALUE._col0":"_col0"},"sort order:":"","Reduce Sink Vectorization:":{"className:":"VectorReduceSinkOperator","native:":"false","nativeConditionsMet:":["hive.vectorized.execution.reducesink.new.enabled IS true","No PTF TopN IS true","No DISTINCT columns IS true","BinarySortableSerDe for keys IS true","LazyBinarySerDe for values IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","value expressions:":"_col0 (type: bigint)","OperatorId:":"RS_32"}}}}}}}}}}}}],"Execution mode:":"vectorized","Map Vectorization:":{"enabled:":"true","enabledConditionsMet:":["hive.vectorized.use.vectorized.input.format IS true"],"inputFormatFeatureSupport:":"[]","featureSupportInUse:":"[]","inputFileFormats:":["org.apache.hadoop.hive.ql.io.orc.OrcInputFormat"],"allNative:":"false","usesVectorUDFAdaptor:":"false","vectorized:":"true","rowBatchContext:":{"dataColumnCount:":"12","includeColumns:":"[2, 
6]","dataColumns:":["ctinyint:tinyint","csmallint:smallint","cint:int","cbigint:bigint","cfloat:float","cdouble:double","cstring1:string","cstring2:string","ctimestamp1:timestamp","ctimestamp2:timestamp","cboolean1:boolean","cboolean2:boolean"],"partitionColumnCount:":"0","scratchColumnTypeNames:":"[]"}},"Local Work:":{"Map Reduce Local Work":{}},"Reduce Vectorization:":{"enabled:":"false","enableConditionsMet:":["hive.vectorized.execution.reduce.enabled IS true"],"enableConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Reduce Operator Tree:":{"Group By Operator":{"aggregations:":["count(VALUE._col0)"],"mode:":"mergepartial","outputColumnNames:":["_col0"],"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"GBY_15","children":{"File Output Operator":{"compressed:":"false","Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","table:":{"input format:":"org.apache.hadoop.mapred.SequenceFileInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat","serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"},"OperatorId:":"FS_17"}}}}}},"Stage-0":{"Fetch Operator":{"limit:":"-1","Processor Tree:":{"ListSink":{"OperatorId:":"LIST_SINK_33"}}}}}} PREHOOK: query: select count(*) from (select c.cstring1 from small_alltypesorc_a c left outer join small_alltypesorc_a cd @@ -284,7 +284,7 @@ left outer join small_alltypesorc_a hd on hd.cstring1 = c.cstring1 ) t1 POSTHOOK: type: QUERY -{"PLAN VECTORIZATION":{"enabled":true,"enabledConditionsMet":["hive.vectorized.execution.enabled IS true"]},"STAGE DEPENDENCIES":{"Stage-8":{"ROOT STAGE":"TRUE"},"Stage-3":{"DEPENDENT STAGES":"Stage-8"},"Stage-0":{"DEPENDENT STAGES":"Stage-3"}},"STAGE PLANS":{"Stage-8":{"Map Reduce Local Work":{"Alias -> Map Local Tables:":{"$hdt$_1:cd":{"Fetch Operator":{"limit:":"-1"}},"$hdt$_2:hd":{"Fetch Operator":{"limit:":"-1"}}},"Alias -> Map Local Operator Tree:":{"$hdt$_1:cd":{"TableScan":{"alias:":"cd","columns:":["cstring2"],"database:":"default","Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_a","isTempTable:":"false","OperatorId:":"TS_2","children":{"Select Operator":{"expressions:":"cstring2 (type: string)","columnExprMap:":{"_col0":"cstring2"},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_3","children":{"HashTable Sink Operator":{"keys:":{"0":"_col1 (type: string)","1":"_col0 (type: string)"},"OperatorId:":"HASHTABLESINK_26"}}}}}},"$hdt$_2:hd":{"TableScan":{"alias:":"hd","columns:":["cstring1"],"database:":"default","Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_a","isTempTable:":"false","OperatorId:":"TS_4","children":{"Select Operator":{"expressions:":"cstring1 (type: string)","columnExprMap:":{"_col0":"cstring1"},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_5","children":{"HashTable Sink Operator":{"keys:":{"0":"_col0 (type: string)","1":"_col0 (type: string)"},"OperatorId:":"HASHTABLESINK_24"}}}}}}}}},"Stage-3":{"Map Reduce":{"Map Operator Tree:":[{"TableScan":{"alias:":"c","columns:":["cstring1","cstring2"],"database:":"default","Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_a","TableScan 
Vectorization:":{"native:":"true","vectorizationSchemaColumns:":"[0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct]"},"isTempTable:":"false","OperatorId:":"TS_0","children":{"Select Operator":{"expressions:":"cstring1 (type: string), cstring2 (type: string)","columnExprMap:":{"_col0":"cstring1","_col1":"cstring2"},"outputColumnNames:":["_col0","_col1"],"Select Vectorization:":{"className:":"VectorSelectOperator","native:":"true","projectedOutputColumnNums:":"[6, 7]"},"Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_28","children":{"Map Join Operator":{"columnExprMap:":{"_col0":"0:_col0"},"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col1 (type: string)","1":"_col0 (type: string)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 7:string"],"bigTableValueExpressions:":["col 6:string"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Outer Join has keys IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 22 Data size: 4840 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_29","children":{"Map Join Operator":{"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col0 (type: string)","1":"_col0 (type: string)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 0:string"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Outer Join has keys IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Statistics:":"Num rows: 24 Data size: 5324 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_30","children":{"Group By Operator":{"aggregations:":["count()"],"Group By Vectorization:":{"aggregators:":["VectorUDAFCountStar(*) -> bigint"],"className:":"VectorGroupByOperator","groupByMode:":"HASH","native:":"false","vectorProcessingMode:":"HASH","projectedOutputColumnNums:":"[0]"},"mode:":"hash","outputColumnNames:":["_col0"],"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"GBY_31","children":{"Reduce Output Operator":{"columnExprMap:":{"VALUE._col0":"_col0"},"sort order:":"","Reduce Sink Vectorization:":{"className:":"VectorReduceSinkOperator","native:":"false","nativeConditionsMet:":["hive.vectorized.execution.reducesink.new.enabled IS true","No PTF TopN IS true","No DISTINCT columns IS true","BinarySortableSerDe for keys IS true","LazyBinarySerDe for values IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","value expressions:":"_col0 (type: bigint)","OperatorId:":"RS_32"}}}}}}}}}}}}],"Execution mode:":"vectorized","Map 
Vectorization:":{"enabled:":"true","enabledConditionsMet:":["hive.vectorized.use.vectorized.input.format IS true"],"inputFormatFeatureSupport:":"[]","featureSupportInUse:":"[]","inputFileFormats:":["org.apache.hadoop.hive.ql.io.orc.OrcInputFormat"],"allNative:":"false","usesVectorUDFAdaptor:":"false","vectorized:":"true","rowBatchContext:":{"dataColumnCount:":"12","includeColumns:":"[6, 7]","dataColumns:":["ctinyint:tinyint","csmallint:smallint","cint:int","cbigint:bigint","cfloat:float","cdouble:double","cstring1:string","cstring2:string","ctimestamp1:timestamp","ctimestamp2:timestamp","cboolean1:boolean","cboolean2:boolean"],"partitionColumnCount:":"0","scratchColumnTypeNames:":"[]"}},"Local Work:":{"Map Reduce Local Work":{}},"Reduce Vectorization:":{"enabled:":"false","enableConditionsMet:":["hive.vectorized.execution.reduce.enabled IS true"],"enableConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Reduce Operator Tree:":{"Group By Operator":{"aggregations:":["count(VALUE._col0)"],"mode:":"mergepartial","outputColumnNames:":["_col0"],"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"GBY_15","children":{"File Output Operator":{"compressed:":"false","Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","table:":{"input format:":"org.apache.hadoop.mapred.SequenceFileInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat","serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"},"OperatorId:":"FS_17"}}}}}},"Stage-0":{"Fetch Operator":{"limit:":"-1","Processor Tree:":{"ListSink":{"OperatorId:":"LIST_SINK_33"}}}}}} +{"PLAN VECTORIZATION":{"enabled":true,"enabledConditionsMet":["hive.vectorized.execution.enabled IS true"]},"STAGE DEPENDENCIES":{"Stage-8":{"ROOT STAGE":"TRUE"},"Stage-3":{"DEPENDENT STAGES":"Stage-8"},"Stage-0":{"DEPENDENT STAGES":"Stage-3"}},"STAGE PLANS":{"Stage-8":{"Map Reduce Local Work":{"Alias -> Map Local Tables:":{"$hdt$_1:cd":{"Fetch Operator":{"limit:":"-1"}},"$hdt$_2:hd":{"Fetch Operator":{"limit:":"-1"}}},"Alias -> Map Local Operator Tree:":{"$hdt$_1:cd":{"TableScan":{"alias:":"cd","columns:":["cstring2"],"database:":"default","Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_a","isTempTable:":"false","OperatorId:":"TS_2","children":{"Select Operator":{"expressions:":"cstring2 (type: string)","columnExprMap:":{"_col0":"cstring2"},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_3","children":{"HashTable Sink Operator":{"keys:":{"0":"_col1 (type: string)","1":"_col0 (type: string)"},"OperatorId:":"HASHTABLESINK_26"}}}}}},"$hdt$_2:hd":{"TableScan":{"alias:":"hd","columns:":["cstring1"],"database:":"default","Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_a","isTempTable:":"false","OperatorId:":"TS_4","children":{"Select Operator":{"expressions:":"cstring1 (type: string)","columnExprMap:":{"_col0":"cstring1"},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_5","children":{"HashTable Sink Operator":{"keys:":{"0":"_col0 (type: string)","1":"_col0 (type: string)"},"OperatorId:":"HASHTABLESINK_24"}}}}}}}}},"Stage-3":{"Map Reduce":{"Map Operator 
Tree:":[{"TableScan":{"alias:":"c","columns:":["cstring1","cstring2"],"database:":"default","Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_a","TableScan Vectorization:":{"native:":"true","vectorizationSchemaColumns:":"[0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct]"},"isTempTable:":"false","OperatorId:":"TS_0","children":{"Select Operator":{"expressions:":"cstring1 (type: string), cstring2 (type: string)","columnExprMap:":{"_col0":"cstring1","_col1":"cstring2"},"outputColumnNames:":["_col0","_col1"],"Select Vectorization:":{"className:":"VectorSelectOperator","native:":"true","projectedOutputColumnNums:":"[6, 7]"},"Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_28","children":{"Map Join Operator":{"columnExprMap:":{"_col0":"0:_col0"},"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col1 (type: string)","1":"_col0 (type: string)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 7:string"],"bigTableValueExpressions:":["col 6:string"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Outer Join has keys IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 22 Data size: 4840 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_29","children":{"Map Join Operator":{"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col0 (type: string)","1":"_col0 (type: string)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 0:string"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Outer Join has keys IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Statistics:":"Num rows: 24 Data size: 5324 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_30","children":{"Group By Operator":{"aggregations:":["count()"],"Group By Vectorization:":{"aggregators:":["VectorUDAFCountStar(*) -> bigint"],"className:":"VectorGroupByOperator","groupByMode:":"HASH","native:":"false","nativeConditionsMet:":["hive.vectorized.execution.groupby.native.enabled IS true","Group By Mode HASH IS true","No Grouping Sets IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false","Single Key Column IS false","Single COUNT aggregation or Duplicate Reduction IS false"],"vectorProcessingMode:":"HASH","projectedOutputColumnNums:":"[0]"},"mode:":"hash","outputColumnNames:":["_col0"],"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"GBY_31","children":{"Reduce Output Operator":{"columnExprMap:":{"VALUE._col0":"_col0"},"sort order:":"","Reduce Sink 
Vectorization:":{"className:":"VectorReduceSinkOperator","native:":"false","nativeConditionsMet:":["hive.vectorized.execution.reducesink.new.enabled IS true","No PTF TopN IS true","No DISTINCT columns IS true","BinarySortableSerDe for keys IS true","LazyBinarySerDe for values IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","value expressions:":"_col0 (type: bigint)","OperatorId:":"RS_32"}}}}}}}}}}}}],"Execution mode:":"vectorized","Map Vectorization:":{"enabled:":"true","enabledConditionsMet:":["hive.vectorized.use.vectorized.input.format IS true"],"inputFormatFeatureSupport:":"[]","featureSupportInUse:":"[]","inputFileFormats:":["org.apache.hadoop.hive.ql.io.orc.OrcInputFormat"],"allNative:":"false","usesVectorUDFAdaptor:":"false","vectorized:":"true","rowBatchContext:":{"dataColumnCount:":"12","includeColumns:":"[6, 7]","dataColumns:":["ctinyint:tinyint","csmallint:smallint","cint:int","cbigint:bigint","cfloat:float","cdouble:double","cstring1:string","cstring2:string","ctimestamp1:timestamp","ctimestamp2:timestamp","cboolean1:boolean","cboolean2:boolean"],"partitionColumnCount:":"0","scratchColumnTypeNames:":"[]"}},"Local Work:":{"Map Reduce Local Work":{}},"Reduce Vectorization:":{"enabled:":"false","enableConditionsMet:":["hive.vectorized.execution.reduce.enabled IS true"],"enableConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Reduce Operator Tree:":{"Group By Operator":{"aggregations:":["count(VALUE._col0)"],"mode:":"mergepartial","outputColumnNames:":["_col0"],"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"GBY_15","children":{"File Output Operator":{"compressed:":"false","Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","table:":{"input format:":"org.apache.hadoop.mapred.SequenceFileInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat","serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"},"OperatorId:":"FS_17"}}}}}},"Stage-0":{"Fetch Operator":{"limit:":"-1","Processor Tree:":{"ListSink":{"OperatorId:":"LIST_SINK_33"}}}}}} PREHOOK: query: select count(*) from (select c.cstring1 from small_alltypesorc_a c left outer join small_alltypesorc_a cd @@ -324,7 +324,7 @@ left outer join small_alltypesorc_a hd on hd.cstring1 = c.cstring1 and hd.cint = c.cint ) t1 POSTHOOK: type: QUERY -{"PLAN VECTORIZATION":{"enabled":true,"enabledConditionsMet":["hive.vectorized.execution.enabled IS true"]},"STAGE DEPENDENCIES":{"Stage-8":{"ROOT STAGE":"TRUE"},"Stage-3":{"DEPENDENT STAGES":"Stage-8"},"Stage-0":{"DEPENDENT STAGES":"Stage-3"}},"STAGE PLANS":{"Stage-8":{"Map Reduce Local Work":{"Alias -> Map Local Tables:":{"$hdt$_1:cd":{"Fetch Operator":{"limit:":"-1"}},"$hdt$_2:hd":{"Fetch Operator":{"limit:":"-1"}}},"Alias -> Map Local Operator Tree:":{"$hdt$_1:cd":{"TableScan":{"alias:":"cd","columns:":["cbigint","cstring2"],"database:":"default","Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_a","isTempTable:":"false","OperatorId:":"TS_2","children":{"Select Operator":{"expressions:":"cbigint (type: bigint), cstring2 (type: string)","columnExprMap:":{"_col0":"cbigint","_col1":"cstring2"},"outputColumnNames:":["_col0","_col1"],"Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_3","children":{"HashTable Sink 
Operator":{"keys:":{"0":"_col1 (type: bigint), _col3 (type: string)","1":"_col0 (type: bigint), _col1 (type: string)"},"OperatorId:":"HASHTABLESINK_26"}}}}}},"$hdt$_2:hd":{"TableScan":{"alias:":"hd","columns:":["cint","cstring1"],"database:":"default","Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_a","isTempTable:":"false","OperatorId:":"TS_4","children":{"Select Operator":{"expressions:":"cint (type: int), cstring1 (type: string)","columnExprMap:":{"_col0":"cint","_col1":"cstring1"},"outputColumnNames:":["_col0","_col1"],"Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_5","children":{"HashTable Sink Operator":{"keys:":{"0":"_col0 (type: int), _col2 (type: string)","1":"_col0 (type: int), _col1 (type: string)"},"OperatorId:":"HASHTABLESINK_24"}}}}}}}}},"Stage-3":{"Map Reduce":{"Map Operator Tree:":[{"TableScan":{"alias:":"c","columns:":["cint","cbigint","cstring1","cstring2"],"database:":"default","Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_a","TableScan Vectorization:":{"native:":"true","vectorizationSchemaColumns:":"[0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct]"},"isTempTable:":"false","OperatorId:":"TS_0","children":{"Select Operator":{"expressions:":"cint (type: int), cbigint (type: bigint), cstring1 (type: string), cstring2 (type: string)","columnExprMap:":{"_col0":"cint","_col1":"cbigint","_col2":"cstring1","_col3":"cstring2"},"outputColumnNames:":["_col0","_col1","_col2","_col3"],"Select Vectorization:":{"className:":"VectorSelectOperator","native:":"true","projectedOutputColumnNums:":"[2, 3, 6, 7]"},"Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_28","children":{"Map Join Operator":{"columnExprMap:":{"_col0":"0:_col0","_col2":"0:_col2"},"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col1 (type: bigint), _col3 (type: string)","1":"_col0 (type: bigint), _col1 (type: string)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 3:bigint","col 7:string"],"bigTableValueExpressions:":["col 2:int","col 6:string"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Outer Join has keys IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"outputColumnNames:":["_col0","_col2"],"Statistics:":"Num rows: 22 Data size: 4840 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_29","children":{"Map Join Operator":{"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col0 (type: int), _col2 (type: string)","1":"_col0 (type: int), _col1 (type: string)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 0:int","col 1:string"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS 
true","Outer Join has keys IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Statistics:":"Num rows: 24 Data size: 5324 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_30","children":{"Group By Operator":{"aggregations:":["count()"],"Group By Vectorization:":{"aggregators:":["VectorUDAFCountStar(*) -> bigint"],"className:":"VectorGroupByOperator","groupByMode:":"HASH","native:":"false","vectorProcessingMode:":"HASH","projectedOutputColumnNums:":"[0]"},"mode:":"hash","outputColumnNames:":["_col0"],"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"GBY_31","children":{"Reduce Output Operator":{"columnExprMap:":{"VALUE._col0":"_col0"},"sort order:":"","Reduce Sink Vectorization:":{"className:":"VectorReduceSinkOperator","native:":"false","nativeConditionsMet:":["hive.vectorized.execution.reducesink.new.enabled IS true","No PTF TopN IS true","No DISTINCT columns IS true","BinarySortableSerDe for keys IS true","LazyBinarySerDe for values IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","value expressions:":"_col0 (type: bigint)","OperatorId:":"RS_32"}}}}}}}}}}}}],"Execution mode:":"vectorized","Map Vectorization:":{"enabled:":"true","enabledConditionsMet:":["hive.vectorized.use.vectorized.input.format IS true"],"inputFormatFeatureSupport:":"[]","featureSupportInUse:":"[]","inputFileFormats:":["org.apache.hadoop.hive.ql.io.orc.OrcInputFormat"],"allNative:":"false","usesVectorUDFAdaptor:":"false","vectorized:":"true","rowBatchContext:":{"dataColumnCount:":"12","includeColumns:":"[2, 3, 6, 7]","dataColumns:":["ctinyint:tinyint","csmallint:smallint","cint:int","cbigint:bigint","cfloat:float","cdouble:double","cstring1:string","cstring2:string","ctimestamp1:timestamp","ctimestamp2:timestamp","cboolean1:boolean","cboolean2:boolean"],"partitionColumnCount:":"0","scratchColumnTypeNames:":"[]"}},"Local Work:":{"Map Reduce Local Work":{}},"Reduce Vectorization:":{"enabled:":"false","enableConditionsMet:":["hive.vectorized.execution.reduce.enabled IS true"],"enableConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Reduce Operator Tree:":{"Group By Operator":{"aggregations:":["count(VALUE._col0)"],"mode:":"mergepartial","outputColumnNames:":["_col0"],"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"GBY_15","children":{"File Output Operator":{"compressed:":"false","Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","table:":{"input format:":"org.apache.hadoop.mapred.SequenceFileInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat","serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"},"OperatorId:":"FS_17"}}}}}},"Stage-0":{"Fetch Operator":{"limit:":"-1","Processor Tree:":{"ListSink":{"OperatorId:":"LIST_SINK_33"}}}}}} +{"PLAN VECTORIZATION":{"enabled":true,"enabledConditionsMet":["hive.vectorized.execution.enabled IS true"]},"STAGE DEPENDENCIES":{"Stage-8":{"ROOT STAGE":"TRUE"},"Stage-3":{"DEPENDENT STAGES":"Stage-8"},"Stage-0":{"DEPENDENT STAGES":"Stage-3"}},"STAGE PLANS":{"Stage-8":{"Map Reduce Local Work":{"Alias -> Map Local Tables:":{"$hdt$_1:cd":{"Fetch Operator":{"limit:":"-1"}},"$hdt$_2:hd":{"Fetch Operator":{"limit:":"-1"}}},"Alias -> Map Local Operator 
Tree:":{"$hdt$_1:cd":{"TableScan":{"alias:":"cd","columns:":["cbigint","cstring2"],"database:":"default","Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_a","isTempTable:":"false","OperatorId:":"TS_2","children":{"Select Operator":{"expressions:":"cbigint (type: bigint), cstring2 (type: string)","columnExprMap:":{"_col0":"cbigint","_col1":"cstring2"},"outputColumnNames:":["_col0","_col1"],"Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_3","children":{"HashTable Sink Operator":{"keys:":{"0":"_col1 (type: bigint), _col3 (type: string)","1":"_col0 (type: bigint), _col1 (type: string)"},"OperatorId:":"HASHTABLESINK_26"}}}}}},"$hdt$_2:hd":{"TableScan":{"alias:":"hd","columns:":["cint","cstring1"],"database:":"default","Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_a","isTempTable:":"false","OperatorId:":"TS_4","children":{"Select Operator":{"expressions:":"cint (type: int), cstring1 (type: string)","columnExprMap:":{"_col0":"cint","_col1":"cstring1"},"outputColumnNames:":["_col0","_col1"],"Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_5","children":{"HashTable Sink Operator":{"keys:":{"0":"_col0 (type: int), _col2 (type: string)","1":"_col0 (type: int), _col1 (type: string)"},"OperatorId:":"HASHTABLESINK_24"}}}}}}}}},"Stage-3":{"Map Reduce":{"Map Operator Tree:":[{"TableScan":{"alias:":"c","columns:":["cint","cbigint","cstring1","cstring2"],"database:":"default","Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_a","TableScan Vectorization:":{"native:":"true","vectorizationSchemaColumns:":"[0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct]"},"isTempTable:":"false","OperatorId:":"TS_0","children":{"Select Operator":{"expressions:":"cint (type: int), cbigint (type: bigint), cstring1 (type: string), cstring2 (type: string)","columnExprMap:":{"_col0":"cint","_col1":"cbigint","_col2":"cstring1","_col3":"cstring2"},"outputColumnNames:":["_col0","_col1","_col2","_col3"],"Select Vectorization:":{"className:":"VectorSelectOperator","native:":"true","projectedOutputColumnNums:":"[2, 3, 6, 7]"},"Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_28","children":{"Map Join Operator":{"columnExprMap:":{"_col0":"0:_col0","_col2":"0:_col2"},"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col1 (type: bigint), _col3 (type: string)","1":"_col0 (type: bigint), _col1 (type: string)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 3:bigint","col 7:string"],"bigTableValueExpressions:":["col 2:int","col 6:string"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Outer Join has keys IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"outputColumnNames:":["_col0","_col2"],"Statistics:":"Num rows: 22 Data size: 4840 Basic stats: 
COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_29","children":{"Map Join Operator":{"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col0 (type: int), _col2 (type: string)","1":"_col0 (type: int), _col1 (type: string)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 0:int","col 1:string"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Outer Join has keys IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Statistics:":"Num rows: 24 Data size: 5324 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_30","children":{"Group By Operator":{"aggregations:":["count()"],"Group By Vectorization:":{"aggregators:":["VectorUDAFCountStar(*) -> bigint"],"className:":"VectorGroupByOperator","groupByMode:":"HASH","native:":"false","nativeConditionsMet:":["hive.vectorized.execution.groupby.native.enabled IS true","Group By Mode HASH IS true","No Grouping Sets IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false","Single Key Column IS false","Single COUNT aggregation or Duplicate Reduction IS false"],"vectorProcessingMode:":"HASH","projectedOutputColumnNums:":"[0]"},"mode:":"hash","outputColumnNames:":["_col0"],"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"GBY_31","children":{"Reduce Output Operator":{"columnExprMap:":{"VALUE._col0":"_col0"},"sort order:":"","Reduce Sink Vectorization:":{"className:":"VectorReduceSinkOperator","native:":"false","nativeConditionsMet:":["hive.vectorized.execution.reducesink.new.enabled IS true","No PTF TopN IS true","No DISTINCT columns IS true","BinarySortableSerDe for keys IS true","LazyBinarySerDe for values IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","value expressions:":"_col0 (type: bigint)","OperatorId:":"RS_32"}}}}}}}}}}}}],"Execution mode:":"vectorized","Map Vectorization:":{"enabled:":"true","enabledConditionsMet:":["hive.vectorized.use.vectorized.input.format IS true"],"inputFormatFeatureSupport:":"[]","featureSupportInUse:":"[]","inputFileFormats:":["org.apache.hadoop.hive.ql.io.orc.OrcInputFormat"],"allNative:":"false","usesVectorUDFAdaptor:":"false","vectorized:":"true","rowBatchContext:":{"dataColumnCount:":"12","includeColumns:":"[2, 3, 6, 7]","dataColumns:":["ctinyint:tinyint","csmallint:smallint","cint:int","cbigint:bigint","cfloat:float","cdouble:double","cstring1:string","cstring2:string","ctimestamp1:timestamp","ctimestamp2:timestamp","cboolean1:boolean","cboolean2:boolean"],"partitionColumnCount:":"0","scratchColumnTypeNames:":"[]"}},"Local Work:":{"Map Reduce Local Work":{}},"Reduce Vectorization:":{"enabled:":"false","enableConditionsMet:":["hive.vectorized.execution.reduce.enabled IS true"],"enableConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Reduce Operator Tree:":{"Group By Operator":{"aggregations:":["count(VALUE._col0)"],"mode:":"mergepartial","outputColumnNames:":["_col0"],"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"GBY_15","children":{"File Output Operator":{"compressed:":"false","Statistics:":"Num rows: 
1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","table:":{"input format:":"org.apache.hadoop.mapred.SequenceFileInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat","serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"},"OperatorId:":"FS_17"}}}}}},"Stage-0":{"Fetch Operator":{"limit:":"-1","Processor Tree:":{"ListSink":{"OperatorId:":"LIST_SINK_33"}}}}}} PREHOOK: query: select count(*) from (select c.cstring1 from small_alltypesorc_a c left outer join small_alltypesorc_a cd diff --git ql/src/test/results/clientpositive/vector_outer_join4.q.out ql/src/test/results/clientpositive/vector_outer_join4.q.out index 51ed3a2..3e587af 100644 --- ql/src/test/results/clientpositive/vector_outer_join4.q.out +++ ql/src/test/results/clientpositive/vector_outer_join4.q.out @@ -782,7 +782,7 @@ left outer join small_alltypesorc_b hd on hd.ctinyint = c.ctinyint ) t1 POSTHOOK: type: QUERY -{"PLAN VECTORIZATION":{"enabled":true,"enabledConditionsMet":["hive.vectorized.execution.enabled IS true"]},"STAGE DEPENDENCIES":{"Stage-8":{"ROOT STAGE":"TRUE"},"Stage-3":{"DEPENDENT STAGES":"Stage-8"},"Stage-0":{"DEPENDENT STAGES":"Stage-3"}},"STAGE PLANS":{"Stage-8":{"Map Reduce Local Work":{"Alias -> Map Local Tables:":{"$hdt$_1:cd":{"Fetch Operator":{"limit:":"-1"}},"$hdt$_2:hd":{"Fetch Operator":{"limit:":"-1"}}},"Alias -> Map Local Operator Tree:":{"$hdt$_1:cd":{"TableScan":{"alias:":"cd","columns:":["cint"],"database:":"default","Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_b","isTempTable:":"false","OperatorId:":"TS_2","children":{"Select Operator":{"expressions:":"cint (type: int)","columnExprMap:":{"_col0":"cint"},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_3","children":{"HashTable Sink Operator":{"keys:":{"0":"_col1 (type: int)","1":"_col0 (type: int)"},"OperatorId:":"HASHTABLESINK_26"}}}}}},"$hdt$_2:hd":{"TableScan":{"alias:":"hd","columns:":["ctinyint"],"database:":"default","Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_b","isTempTable:":"false","OperatorId:":"TS_4","children":{"Select Operator":{"expressions:":"ctinyint (type: tinyint)","columnExprMap:":{"_col0":"ctinyint"},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_5","children":{"HashTable Sink Operator":{"keys:":{"0":"_col0 (type: tinyint)","1":"_col0 (type: tinyint)"},"OperatorId:":"HASHTABLESINK_24"}}}}}}}}},"Stage-3":{"Map Reduce":{"Map Operator Tree:":[{"TableScan":{"alias:":"c","columns:":["ctinyint","cint"],"database:":"default","Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_b","TableScan Vectorization:":{"native:":"true","vectorizationSchemaColumns:":"[0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct]"},"isTempTable:":"false","OperatorId:":"TS_0","children":{"Select Operator":{"expressions:":"ctinyint (type: tinyint), cint (type: int)","columnExprMap:":{"_col0":"ctinyint","_col1":"cint"},"outputColumnNames:":["_col0","_col1"],"Select 
Vectorization:":{"className:":"VectorSelectOperator","native:":"true","projectedOutputColumnNums:":"[0, 2]"},"Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_28","children":{"Map Join Operator":{"columnExprMap:":{"_col0":"0:_col0"},"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col1 (type: int)","1":"_col0 (type: int)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 2:int"],"bigTableValueExpressions:":["col 0:tinyint"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Outer Join has keys IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 33 Data size: 7348 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_29","children":{"Map Join Operator":{"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col0 (type: tinyint)","1":"_col0 (type: tinyint)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 0:tinyint"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Outer Join has keys IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Statistics:":"Num rows: 36 Data size: 8082 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_30","children":{"Group By Operator":{"aggregations:":["count()"],"Group By Vectorization:":{"aggregators:":["VectorUDAFCountStar(*) -> bigint"],"className:":"VectorGroupByOperator","groupByMode:":"HASH","native:":"false","vectorProcessingMode:":"HASH","projectedOutputColumnNums:":"[0]"},"mode:":"hash","outputColumnNames:":["_col0"],"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"GBY_31","children":{"Reduce Output Operator":{"columnExprMap:":{"VALUE._col0":"_col0"},"sort order:":"","Reduce Sink Vectorization:":{"className:":"VectorReduceSinkOperator","native:":"false","nativeConditionsMet:":["hive.vectorized.execution.reducesink.new.enabled IS true","No PTF TopN IS true","No DISTINCT columns IS true","BinarySortableSerDe for keys IS true","LazyBinarySerDe for values IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","value expressions:":"_col0 (type: bigint)","OperatorId:":"RS_32"}}}}}}}}}}}}],"Execution mode:":"vectorized","Map Vectorization:":{"enabled:":"true","enabledConditionsMet:":["hive.vectorized.use.vectorized.input.format IS true"],"inputFormatFeatureSupport:":"[]","featureSupportInUse:":"[]","inputFileFormats:":["org.apache.hadoop.hive.ql.io.orc.OrcInputFormat"],"allNative:":"false","usesVectorUDFAdaptor:":"false","vectorized:":"true","rowBatchContext:":{"dataColumnCount:":"12","includeColumns:":"[0, 
2]","dataColumns:":["ctinyint:tinyint","csmallint:smallint","cint:int","cbigint:bigint","cfloat:float","cdouble:double","cstring1:string","cstring2:string","ctimestamp1:timestamp","ctimestamp2:timestamp","cboolean1:boolean","cboolean2:boolean"],"partitionColumnCount:":"0","scratchColumnTypeNames:":"[]"}},"Local Work:":{"Map Reduce Local Work":{}},"Reduce Vectorization:":{"enabled:":"false","enableConditionsMet:":["hive.vectorized.execution.reduce.enabled IS true"],"enableConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Reduce Operator Tree:":{"Group By Operator":{"aggregations:":["count(VALUE._col0)"],"mode:":"mergepartial","outputColumnNames:":["_col0"],"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"GBY_15","children":{"File Output Operator":{"compressed:":"false","Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","table:":{"input format:":"org.apache.hadoop.mapred.SequenceFileInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat","serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"},"OperatorId:":"FS_17"}}}}}},"Stage-0":{"Fetch Operator":{"limit:":"-1","Processor Tree:":{"ListSink":{"OperatorId:":"LIST_SINK_33"}}}}}} +{"PLAN VECTORIZATION":{"enabled":true,"enabledConditionsMet":["hive.vectorized.execution.enabled IS true"]},"STAGE DEPENDENCIES":{"Stage-8":{"ROOT STAGE":"TRUE"},"Stage-3":{"DEPENDENT STAGES":"Stage-8"},"Stage-0":{"DEPENDENT STAGES":"Stage-3"}},"STAGE PLANS":{"Stage-8":{"Map Reduce Local Work":{"Alias -> Map Local Tables:":{"$hdt$_1:cd":{"Fetch Operator":{"limit:":"-1"}},"$hdt$_2:hd":{"Fetch Operator":{"limit:":"-1"}}},"Alias -> Map Local Operator Tree:":{"$hdt$_1:cd":{"TableScan":{"alias:":"cd","columns:":["cint"],"database:":"default","Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_b","isTempTable:":"false","OperatorId:":"TS_2","children":{"Select Operator":{"expressions:":"cint (type: int)","columnExprMap:":{"_col0":"cint"},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_3","children":{"HashTable Sink Operator":{"keys:":{"0":"_col1 (type: int)","1":"_col0 (type: int)"},"OperatorId:":"HASHTABLESINK_26"}}}}}},"$hdt$_2:hd":{"TableScan":{"alias:":"hd","columns:":["ctinyint"],"database:":"default","Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_b","isTempTable:":"false","OperatorId:":"TS_4","children":{"Select Operator":{"expressions:":"ctinyint (type: tinyint)","columnExprMap:":{"_col0":"ctinyint"},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_5","children":{"HashTable Sink Operator":{"keys:":{"0":"_col0 (type: tinyint)","1":"_col0 (type: tinyint)"},"OperatorId:":"HASHTABLESINK_24"}}}}}}}}},"Stage-3":{"Map Reduce":{"Map Operator Tree:":[{"TableScan":{"alias:":"c","columns:":["ctinyint","cint"],"database:":"default","Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_b","TableScan Vectorization:":{"native:":"true","vectorizationSchemaColumns:":"[0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 
11:cboolean2:boolean, 12:ROW__ID:struct]"},"isTempTable:":"false","OperatorId:":"TS_0","children":{"Select Operator":{"expressions:":"ctinyint (type: tinyint), cint (type: int)","columnExprMap:":{"_col0":"ctinyint","_col1":"cint"},"outputColumnNames:":["_col0","_col1"],"Select Vectorization:":{"className:":"VectorSelectOperator","native:":"true","projectedOutputColumnNums:":"[0, 2]"},"Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_28","children":{"Map Join Operator":{"columnExprMap:":{"_col0":"0:_col0"},"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col1 (type: int)","1":"_col0 (type: int)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 2:int"],"bigTableValueExpressions:":["col 0:tinyint"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Outer Join has keys IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 33 Data size: 7348 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_29","children":{"Map Join Operator":{"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col0 (type: tinyint)","1":"_col0 (type: tinyint)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 0:tinyint"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Outer Join has keys IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Statistics:":"Num rows: 36 Data size: 8082 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_30","children":{"Group By Operator":{"aggregations:":["count()"],"Group By Vectorization:":{"aggregators:":["VectorUDAFCountStar(*) -> bigint"],"className:":"VectorGroupByOperator","groupByMode:":"HASH","native:":"false","nativeConditionsMet:":["hive.vectorized.execution.groupby.native.enabled IS true","Group By Mode HASH IS true","No Grouping Sets IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false","Single Key Column IS false","Single COUNT aggregation or Duplicate Reduction IS false"],"vectorProcessingMode:":"HASH","projectedOutputColumnNums:":"[0]"},"mode:":"hash","outputColumnNames:":["_col0"],"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"GBY_31","children":{"Reduce Output Operator":{"columnExprMap:":{"VALUE._col0":"_col0"},"sort order:":"","Reduce Sink Vectorization:":{"className:":"VectorReduceSinkOperator","native:":"false","nativeConditionsMet:":["hive.vectorized.execution.reducesink.new.enabled IS true","No PTF TopN IS true","No DISTINCT columns IS true","BinarySortableSerDe for keys IS true","LazyBinarySerDe for values IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","value expressions:":"_col0 (type: bigint)","OperatorId:":"RS_32"}}}}}}}}}}}}],"Execution mode:":"vectorized","Map 
Vectorization:":{"enabled:":"true","enabledConditionsMet:":["hive.vectorized.use.vectorized.input.format IS true"],"inputFormatFeatureSupport:":"[]","featureSupportInUse:":"[]","inputFileFormats:":["org.apache.hadoop.hive.ql.io.orc.OrcInputFormat"],"allNative:":"false","usesVectorUDFAdaptor:":"false","vectorized:":"true","rowBatchContext:":{"dataColumnCount:":"12","includeColumns:":"[0, 2]","dataColumns:":["ctinyint:tinyint","csmallint:smallint","cint:int","cbigint:bigint","cfloat:float","cdouble:double","cstring1:string","cstring2:string","ctimestamp1:timestamp","ctimestamp2:timestamp","cboolean1:boolean","cboolean2:boolean"],"partitionColumnCount:":"0","scratchColumnTypeNames:":"[]"}},"Local Work:":{"Map Reduce Local Work":{}},"Reduce Vectorization:":{"enabled:":"false","enableConditionsMet:":["hive.vectorized.execution.reduce.enabled IS true"],"enableConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Reduce Operator Tree:":{"Group By Operator":{"aggregations:":["count(VALUE._col0)"],"mode:":"mergepartial","outputColumnNames:":["_col0"],"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"GBY_15","children":{"File Output Operator":{"compressed:":"false","Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","table:":{"input format:":"org.apache.hadoop.mapred.SequenceFileInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat","serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"},"OperatorId:":"FS_17"}}}}}},"Stage-0":{"Fetch Operator":{"limit:":"-1","Processor Tree:":{"ListSink":{"OperatorId:":"LIST_SINK_33"}}}}}} PREHOOK: query: select count(*) from (select c.ctinyint from small_alltypesorc_b c left outer join small_alltypesorc_b cd diff --git ql/src/test/results/clientpositive/vector_outer_join_no_keys.q.out ql/src/test/results/clientpositive/vector_outer_join_no_keys.q.out index 7454c4b..f392b9e 100644 --- ql/src/test/results/clientpositive/vector_outer_join_no_keys.q.out +++ ql/src/test/results/clientpositive/vector_outer_join_no_keys.q.out @@ -98,6 +98,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] mode: hash @@ -243,6 +245,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] mode: hash diff --git ql/src/test/results/clientpositive/vector_reduce_groupby_decimal.q.out ql/src/test/results/clientpositive/vector_reduce_groupby_decimal.q.out index b46501e..642d3b2 100644 --- ql/src/test/results/clientpositive/vector_reduce_groupby_decimal.q.out +++ ql/src/test/results/clientpositive/vector_reduce_groupby_decimal.q.out @@ -61,6 +61,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 0:int, col 1:double, col 2:decimal(20,10), col 3:decimal(23,14) native: false + nativeConditionsMet: 
hive.vectorized.execution.groupby.native.enabled IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] keys: cint (type: int), cdouble (type: double), cdecimal1 (type: decimal(20,10)), cdecimal2 (type: decimal(23,14)) diff --git ql/src/test/results/clientpositive/vector_reduce_groupby_duplicate_cols.q.out ql/src/test/results/clientpositive/vector_reduce_groupby_duplicate_cols.q.out index 8784836..c39f561 100644 --- ql/src/test/results/clientpositive/vector_reduce_groupby_duplicate_cols.q.out +++ ql/src/test/results/clientpositive/vector_reduce_groupby_duplicate_cols.q.out @@ -98,6 +98,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 0:int, col 1:int native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single Key Column IS false vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: _col0 (type: int), _col1 (type: int) diff --git ql/src/test/results/clientpositive/vector_string_concat.q.out ql/src/test/results/clientpositive/vector_string_concat.q.out index bede8a1..1eed7f5 100644 --- ql/src/test/results/clientpositive/vector_string_concat.q.out +++ ql/src/test/results/clientpositive/vector_string_concat.q.out @@ -348,6 +348,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 20:string native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: _col0 (type: string) diff --git ql/src/test/results/clientpositive/vector_when_case_null.q.out ql/src/test/results/clientpositive/vector_when_case_null.q.out index 13fb6d1..17eb3bb 100644 --- ql/src/test/results/clientpositive/vector_when_case_null.q.out +++ ql/src/test/results/clientpositive/vector_when_case_null.q.out @@ -56,6 +56,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 0:string native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] keys: _col0 (type: string) diff --git ql/src/test/results/clientpositive/vectorization_1.q.out ql/src/test/results/clientpositive/vectorization_1.q.out index bb8e483..4bde6ff 100644 --- ql/src/test/results/clientpositive/vectorization_1.q.out +++ ql/src/test/results/clientpositive/vectorization_1.q.out @@ -81,6 +81,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH 
projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] mode: hash diff --git ql/src/test/results/clientpositive/vectorization_12.q.out ql/src/test/results/clientpositive/vectorization_12.q.out index e129730..ada86cd 100644 --- ql/src/test/results/clientpositive/vectorization_12.q.out +++ ql/src/test/results/clientpositive/vectorization_12.q.out @@ -105,6 +105,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 5:double, col 3:bigint, col 6:string, col 10:boolean native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6] keys: _col3 (type: double), _col0 (type: bigint), _col2 (type: string), _col1 (type: boolean) diff --git ql/src/test/results/clientpositive/vectorization_13.q.out ql/src/test/results/clientpositive/vectorization_13.q.out index 96eda74..03fc9e3 100644 --- ql/src/test/results/clientpositive/vectorization_13.q.out +++ ql/src/test/results/clientpositive/vectorization_13.q.out @@ -107,6 +107,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 10:boolean, col 0:tinyint, col 8:timestamp, col 4:float, col 6:string native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] keys: _col0 (type: boolean), _col1 (type: tinyint), _col2 (type: timestamp), _col3 (type: float), _col4 (type: string) @@ -437,6 +439,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 10:boolean, col 0:tinyint, col 8:timestamp, col 4:float, col 6:string native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] keys: _col0 (type: boolean), _col1 (type: tinyint), _col2 (type: timestamp), _col3 (type: float), _col4 (type: string) diff --git ql/src/test/results/clientpositive/vectorization_14.q.out ql/src/test/results/clientpositive/vectorization_14.q.out index 7a7a817..9c65a7d 100644 --- ql/src/test/results/clientpositive/vectorization_14.q.out +++ ql/src/test/results/clientpositive/vectorization_14.q.out @@ -107,6 +107,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 6:string, col 4:float, col 5:double, col 8:timestamp, col 10:boolean native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6] keys: _col2 (type: string), _col1 (type: float), _col4 (type: double), _col0 (type: timestamp), _col3 (type: boolean) diff --git ql/src/test/results/clientpositive/vectorization_15.q.out 
ql/src/test/results/clientpositive/vectorization_15.q.out index dbef3e7..16586b7 100644 --- ql/src/test/results/clientpositive/vectorization_15.q.out +++ ql/src/test/results/clientpositive/vectorization_15.q.out @@ -103,6 +103,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 4:float, col 10:boolean, col 5:double, col 6:string, col 0:tinyint, col 2:int, col 8:timestamp native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] keys: _col0 (type: float), _col1 (type: boolean), _col2 (type: double), _col3 (type: string), _col4 (type: tinyint), _col5 (type: int), _col6 (type: timestamp) diff --git ql/src/test/results/clientpositive/vectorization_16.q.out ql/src/test/results/clientpositive/vectorization_16.q.out index 571eae0..32a3545 100644 --- ql/src/test/results/clientpositive/vectorization_16.q.out +++ ql/src/test/results/clientpositive/vectorization_16.q.out @@ -80,6 +80,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 6:string, col 5:double, col 8:timestamp native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3] keys: _col0 (type: string), _col1 (type: double), _col2 (type: timestamp) diff --git ql/src/test/results/clientpositive/vectorization_2.q.out ql/src/test/results/clientpositive/vectorization_2.q.out index e3d6ad0..0dbd04d 100644 --- ql/src/test/results/clientpositive/vectorization_2.q.out +++ ql/src/test/results/clientpositive/vectorization_2.q.out @@ -85,6 +85,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] mode: hash diff --git ql/src/test/results/clientpositive/vectorization_3.q.out ql/src/test/results/clientpositive/vectorization_3.q.out index bb6c014..9a0f112 100644 --- ql/src/test/results/clientpositive/vectorization_3.q.out +++ ql/src/test/results/clientpositive/vectorization_3.q.out @@ -90,6 +90,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13] mode: hash diff --git ql/src/test/results/clientpositive/vectorization_4.q.out ql/src/test/results/clientpositive/vectorization_4.q.out index 395431c..0107dad 100644 --- ql/src/test/results/clientpositive/vectorization_4.q.out +++ 
ql/src/test/results/clientpositive/vectorization_4.q.out @@ -85,6 +85,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3, 4] mode: hash diff --git ql/src/test/results/clientpositive/vectorization_5.q.out ql/src/test/results/clientpositive/vectorization_5.q.out index dfe9715..ed4eeff 100644 --- ql/src/test/results/clientpositive/vectorization_5.q.out +++ ql/src/test/results/clientpositive/vectorization_5.q.out @@ -78,6 +78,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3, 4] mode: hash diff --git ql/src/test/results/clientpositive/vectorization_9.q.out ql/src/test/results/clientpositive/vectorization_9.q.out index 571eae0..32a3545 100644 --- ql/src/test/results/clientpositive/vectorization_9.q.out +++ ql/src/test/results/clientpositive/vectorization_9.q.out @@ -80,6 +80,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 6:string, col 5:double, col 8:timestamp native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3] keys: _col0 (type: string), _col1 (type: double), _col2 (type: timestamp) diff --git ql/src/test/results/clientpositive/vectorization_limit.q.out ql/src/test/results/clientpositive/vectorization_limit.q.out index 7474547..1d89b83 100644 --- ql/src/test/results/clientpositive/vectorization_limit.q.out +++ ql/src/test/results/clientpositive/vectorization_limit.q.out @@ -245,6 +245,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 0:tinyint native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Single Key Column IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1] keys: _col0 (type: tinyint) @@ -425,6 +427,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 0:tinyint native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Single Key Column IS true, Single COUNT aggregation or Duplicate Reduction IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false vectorProcessingMode: HASH projectedOutputColumnNums: [] keys: ctinyint (type: tinyint) @@ -729,6 +733,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 5:double native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Single Key 
Column IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] keys: cdouble (type: double) diff --git ql/src/test/results/clientpositive/vectorization_nested_udf.q.out ql/src/test/results/clientpositive/vectorization_nested_udf.q.out index 2c4fa69..324576a 100644 --- ql/src/test/results/clientpositive/vectorization_nested_udf.q.out +++ ql/src/test/results/clientpositive/vectorization_nested_udf.q.out @@ -38,6 +38,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] mode: hash diff --git ql/src/test/results/clientpositive/vectorized_case.q.out ql/src/test/results/clientpositive/vectorized_case.q.out index 31dcd37..dc8b84e 100644 --- ql/src/test/results/clientpositive/vectorized_case.q.out +++ ql/src/test/results/clientpositive/vectorized_case.q.out @@ -292,6 +292,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1] mode: hash @@ -407,6 +409,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1] mode: hash diff --git ql/src/test/results/clientpositive/vectorized_date_funcs.q.out ql/src/test/results/clientpositive/vectorized_date_funcs.q.out index 50c3448..a5e2906 100644 --- ql/src/test/results/clientpositive/vectorized_date_funcs.q.out +++ ql/src/test/results/clientpositive/vectorized_date_funcs.q.out @@ -1240,6 +1240,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3] mode: hash diff --git ql/src/test/results/clientpositive/vectorized_mapjoin.q.out ql/src/test/results/clientpositive/vectorized_mapjoin.q.out index d9c781c..3c5854e 100644 --- ql/src/test/results/clientpositive/vectorized_mapjoin.q.out +++ ql/src/test/results/clientpositive/vectorized_mapjoin.q.out @@ -93,6 +93,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Group By Mode HASH IS true, No Grouping 
Sets IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3, 4] mode: hash diff --git ql/src/test/results/clientpositive/vectorized_mapjoin2.q.out ql/src/test/results/clientpositive/vectorized_mapjoin2.q.out index e9a0e45..01c7db6 100644 --- ql/src/test/results/clientpositive/vectorized_mapjoin2.q.out +++ ql/src/test/results/clientpositive/vectorized_mapjoin2.q.out @@ -114,6 +114,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] mode: hash diff --git ql/src/test/results/clientpositive/vectorized_mapjoin3.q.out ql/src/test/results/clientpositive/vectorized_mapjoin3.q.out index fb7198d..fe96550 100644 --- ql/src/test/results/clientpositive/vectorized_mapjoin3.q.out +++ ql/src/test/results/clientpositive/vectorized_mapjoin3.q.out @@ -133,6 +133,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] mode: hash @@ -307,6 +309,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] mode: hash @@ -481,6 +485,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] mode: hash diff --git ql/src/test/results/clientpositive/vectorized_parquet_types.q.out ql/src/test/results/clientpositive/vectorized_parquet_types.q.out index 3b7de64..65e2cff 100644 --- ql/src/test/results/clientpositive/vectorized_parquet_types.q.out +++ ql/src/test/results/clientpositive/vectorized_parquet_types.q.out @@ -360,6 +360,8 @@ STAGE PLANS: groupByMode: HASH keyExpressions: col 1:tinyint native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Single Key Column IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8] keys: _col0 (type: tinyint) diff --git 
ql/src/test/results/clientpositive/vectorized_timestamp.q.out ql/src/test/results/clientpositive/vectorized_timestamp.q.out index b0bfc8b..8633b04 100644 --- ql/src/test/results/clientpositive/vectorized_timestamp.q.out +++ ql/src/test/results/clientpositive/vectorized_timestamp.q.out @@ -135,6 +135,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1] mode: hash @@ -322,6 +324,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1] mode: hash @@ -429,6 +433,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2] mode: hash diff --git ql/src/test/results/clientpositive/vectorized_timestamp_funcs.q.out ql/src/test/results/clientpositive/vectorized_timestamp_funcs.q.out index 244aca6..553847c 100644 --- ql/src/test/results/clientpositive/vectorized_timestamp_funcs.q.out +++ ql/src/test/results/clientpositive/vectorized_timestamp_funcs.q.out @@ -732,6 +732,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3] mode: hash @@ -839,6 +841,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0] mode: hash @@ -959,6 +963,8 @@ STAGE PLANS: className: VectorGroupByOperator groupByMode: HASH native: false + nativeConditionsMet: hive.vectorized.execution.groupby.native.enabled IS true, Group By Mode HASH IS true, No Grouping Sets IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Single Key Column IS false, Single COUNT aggregation or Duplicate Reduction IS false vectorProcessingMode: HASH projectedOutputColumnNums: [0, 1, 2, 3] mode: hash diff --git vector-code-gen/src/org/apache/hadoop/hive/tools/GenVectorCode.java 
vector-code-gen/src/org/apache/hadoop/hive/tools/GenVectorCode.java
index fbb89a9..fce013c 100644
--- vector-code-gen/src/org/apache/hadoop/hive/tools/GenVectorCode.java
+++ vector-code-gen/src/org/apache/hadoop/hive/tools/GenVectorCode.java
@@ -26,10 +26,13 @@
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
+import java.util.Map;
 import java.util.Set;
+import org.apache.commons.lang.StringUtils;
 import org.apache.tools.ant.BuildException;
 import org.apache.tools.ant.Task;
@@ -1162,6 +1165,32 @@ //template, , ,
     {"VectorUDAFVarMerge", "VectorUDAFVarPartial2", "PARTIAL2"},
     {"VectorUDAFVarMerge", "VectorUDAFVarFinal", "FINAL"},
+
+    {"GroupByHashSingleKeyOperatorBase", "VectorGroupByHash", "Long", "KeySingleCountOperatorBase", "SingleCount"},
+    {"GroupByHashSingleKeyOperatorBase", "VectorGroupByHash", "String", "KeySingleCountOperatorBase", "SingleCount"},
+    {"GroupByHashSingleKeyOperatorBase", "VectorGroupByHash", "Serialize", "KeySingleCountOperatorBase", "SingleCount"},
+
+    {"GroupByHashSingleKeySingleCountColumnOperator", "VectorGroupByHash", "Long", "KeySingleCountColumnOperator", "SingleCount"},
+    {"GroupByHashSingleKeySingleCountColumnOperator", "VectorGroupByHash", "String", "KeySingleCountColumnOperator", "SingleCount"},
+    {"GroupByHashSingleKeySingleCountColumnOperator", "VectorGroupByHash", "Serialize", "KeySingleCountColumnOperator", "SingleCount"},
+
+    {"GroupByHashSingleKeySingleCountKeyOperator", "VectorGroupByHash", "Long", "KeySingleCountKeyOperator", "SingleCount"},
+    {"GroupByHashSingleKeySingleCountKeyOperator", "VectorGroupByHash", "String", "KeySingleCountKeyOperator", "SingleCount"},
+    {"GroupByHashSingleKeySingleCountKeyOperator", "VectorGroupByHash", "Serialize", "KeySingleCountKeyOperator", "SingleCount"},
+
+    {"GroupByHashSingleKeySingleCountStarOperator", "VectorGroupByHash", "Long", "KeySingleCountStarOperator", "SingleCount"},
+    {"GroupByHashSingleKeySingleCountStarOperator", "VectorGroupByHash", "String", "KeySingleCountStarOperator", "SingleCount"},
+    {"GroupByHashSingleKeySingleCountStarOperator", "VectorGroupByHash", "Serialize", "KeySingleCountStarOperator", "SingleCount"},
+
+
+    {"GroupByHashSingleKeyOperatorBase", "VectorGroupByHash", "Long", "KeyDuplicateReductionOperatorBase", "DuplicateReduction"},
+    {"GroupByHashSingleKeyOperatorBase", "VectorGroupByHash", "String", "KeyDuplicateReductionOperatorBase", "DuplicateReduction"},
+    {"GroupByHashSingleKeyOperatorBase", "VectorGroupByHash", "Serialize", "KeyDuplicateReductionOperatorBase", "DuplicateReduction"},
+
+    {"GroupByHashSingleKeyDuplicateReductionOperator", "VectorGroupByHash", "Long", "KeyDuplicateReductionOperator", "DuplicateReduction"},
+    {"GroupByHashSingleKeyDuplicateReductionOperator", "VectorGroupByHash", "String", "KeyDuplicateReductionOperator", "DuplicateReduction"},
+    {"GroupByHashSingleKeyDuplicateReductionOperator", "VectorGroupByHash", "Serialize", "KeyDuplicateReductionOperator", "DuplicateReduction"},
+
   };
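
Each new row in the expansion table above is a five-column descriptor: template name, class-name prefix, single-key variation (Long, String, or Serialize), class-name suffix, and aggregation variation (SingleCount or DuplicateReduction). A minimal sketch of how such a row turns into a generated class name, mirroring the className construction in generateGroupByOperator further down (the demo class itself is illustrative, not part of the patch):

    import java.util.Arrays;
    import java.util.List;

    // Sketch: derive the generated class name the way generateGroupByOperator does
    // (className = prefix + singleKeyVariation + suffix), assuming the five-column
    // descriptor layout shown in the table above.
    public class DescriptorNameDemo {
      public static void main(String[] args) {
        List<String[]> rows = Arrays.asList(
            new String[] {"GroupByHashSingleKeyOperatorBase", "VectorGroupByHash",
                "Long", "KeySingleCountOperatorBase", "SingleCount"},
            new String[] {"GroupByHashSingleKeySingleCountStarOperator", "VectorGroupByHash",
                "String", "KeySingleCountStarOperator", "SingleCount"});
        for (String[] tdesc : rows) {
          // tdesc[1] = prefix, tdesc[2] = key variation, tdesc[3] = suffix.
          System.out.println(tdesc[0] + " -> " + tdesc[1] + tdesc[2] + tdesc[3]);
        }
        // Prints: ... -> VectorGroupByHashLongKeySingleCountOperatorBase
        //         ... -> VectorGroupByHashStringKeySingleCountStarOperator
      }
    }
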
@@ -1174,6 +1203,11 @@ private String udafOutputDirectory;
   private String udafClassesDirectory;
   private String udafTemplateDirectory;

+  private String groupByOperatorOutputDirectory;
+  private String groupByOperatorClassesDirectory;
+  private String groupByOperatorTemplateDirectory;
+
   private GenVectorTestCode testCodeGen;

   static String joinPath(String...parts) {
@@ -1210,6 +1244,16 @@ public void init(String templateBaseDir, String buildDir) {
     udafTemplateDirectory =
         joinPath(generationDirectory.getAbsolutePath(), "UDAFTemplates");

+    String groupByOperator = joinPath("org", "apache", "hadoop",
+        "hive", "ql", "exec", "vector", "groupby", "operator", "gen");
+    File groupByOperatorOutput = new File(joinPath(buildPath, groupByOperator));
+    File groupByOperatorClasses = new File(joinPath(compiledPath, groupByOperator));
+    groupByOperatorOutputDirectory = groupByOperatorOutput.getAbsolutePath();
+    groupByOperatorClassesDirectory = groupByOperatorClasses.getAbsolutePath();
+
+    groupByOperatorTemplateDirectory =
+        joinPath(generationDirectory.getAbsolutePath(), "GroupByOperatorTemplates");
+
     File testCodeOutput = new File(
         joinPath(buildDir, "generated-test-sources", "java", "org",
@@ -1433,6 +1477,13 @@ private void generate() throws Exception {
     } else if (tdesc[0].equals("TimestampArithmeticDate")) {
       generateTimestampArithmeticDate(tdesc);
+    } else if (
+        tdesc[0].equals("GroupByHashSingleKeyOperatorBase") ||
+        tdesc[0].equals("GroupByHashSingleKeySingleCountColumnOperator") ||
+        tdesc[0].equals("GroupByHashSingleKeySingleCountKeyOperator") ||
+        tdesc[0].equals("GroupByHashSingleKeySingleCountStarOperator") ||
+        tdesc[0].equals("GroupByHashSingleKeyDuplicateReductionOperator")) {
+      generateGroupByOperator(tdesc);
     } else {
       continue;
     }
@@ -3492,35 +3543,115 @@ private static boolean isTimestampIntervalType(String type) {
         || type.equals("interval_day_time"));
   }

-  private boolean containsDefinedStrings(Set<String> defineSet, String commaDefinedString) {
-    String[] definedStrings = commaDefinedString.split(",");
-    boolean result = false;
-    for (String definedString : definedStrings) {
-      if (defineSet.contains(definedString)) {
-        result = true;
-        break;
-      }
-    }
-    return result;
-  }
+  private void generateGroupByOperator(String[] tdesc) throws Exception {
+    String templateName = tdesc[0];
+    String prefix = tdesc[1];
+    String singleKeyVariation = tdesc[2];
+    String suffix = tdesc[3];
+    String aggregationVariation = tdesc[4];
+
+    // Read the template into a string.
+    String className = prefix + singleKeyVariation + suffix;
+    File templateFile =
+        new File(joinPath(this.groupByOperatorTemplateDirectory, templateName + ".txt"));
+    String templateString = readFile(templateFile);
+    templateString = templateString.replaceAll("<ClassName>", className);
+    final String keyColumnVectorType;
+    if (singleKeyVariation.equals("Long")) {
+      keyColumnVectorType = "LongColumnVector";
+    } else if (singleKeyVariation.equals("String")) {
+      keyColumnVectorType = "BytesColumnVector";
+    } else {
+      keyColumnVectorType = "ColumnVector";
+    }
+    templateString = templateString.replaceAll("<SingleKeyVariation>", singleKeyVariation);
+    templateString = templateString.replaceAll("<singleKeyVariation>", singleKeyVariation.toLowerCase());
+    templateString = templateString.replaceAll("<AggregationVariation>", aggregationVariation);
+    templateString = templateString.replaceAll("<aggregationVariation>", aggregationVariation.toLowerCase());
+    templateString = templateString.replaceAll("<KeyColumnVectorType>", keyColumnVectorType);
+
+    final String defineName = singleKeyVariation.toUpperCase() + "_KEY";
+    templateString = evaluateIfDefined(templateString, defineName,
+        this.groupByOperatorTemplateDirectory);
+
+    writeFile(templateFile.lastModified(), groupByOperatorOutputDirectory, groupByOperatorClassesDirectory,
+        className, templateString);
+  }
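
The substitution in generateGroupByOperator is plain replaceAll over the template text. A standalone sketch of that step follows; the angle-bracket placeholder spellings (<ClassName>, <KeyColumnVectorType>) follow the file's existing convention, but the template files themselves are not part of this hunk, so treat the exact names as assumptions:

    // Sketch of the replaceAll-based substitution used above; the placeholder
    // names are assumed spellings for illustration only.
    public class TemplateSubstitutionDemo {
      public static void main(String[] args) {
        String template =
            "public class <ClassName> {\n"
            + "  private <KeyColumnVectorType> keyVector;\n"
            + "}\n";
        String out = template
            .replaceAll("<ClassName>", "VectorGroupByHashLongKeySingleCountStarOperator")
            .replaceAll("<KeyColumnVectorType>", "LongColumnVector");
        // Prints the template with both placeholders filled in.
        System.out.print(out);
      }
    }
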
+
+  private boolean matchesDefinedStrings(Set<String> defineSet, Set<String> newIfDefinedSet,
+      IfDefinedMode ifDefinedMode) {
+    switch (ifDefinedMode) {
+    case SINGLE:
+    case AND_ALL:
+      for (String candidateString : newIfDefinedSet) {
+        if (!defineSet.contains(candidateString)) {
+          return false;
+        }
+      }
+      return true;
+    case OR_ANY:
+      for (String candidateString : newIfDefinedSet) {
+        if (defineSet.contains(candidateString)) {
+          return true;
+        }
+      }
+      return false;
+    default:
+      throw new RuntimeException("Unexpected if defined mode " + ifDefinedMode);
+    }
+  }
+
+  public enum IfDefinedMode {
+    SINGLE,
+    AND_ALL,
+    OR_ANY;
+  }
+
+  private IfDefinedMode parseIfDefinedMode(String newIfDefinedString, Set<String> newIfDefinedSet) {
+    final String[] newIfDefinedStrings;
+    final IfDefinedMode ifDefinedMode;
+    int index = newIfDefinedString.indexOf("&&");
+    if (index != -1) {
+      newIfDefinedStrings = newIfDefinedString.split("&&");
+      ifDefinedMode = IfDefinedMode.AND_ALL;
+    } else {
+      index = newIfDefinedString.indexOf("||");
+      if (index == -1) {
+
+        // One element.
+        newIfDefinedSet.add(newIfDefinedString);
+        return IfDefinedMode.SINGLE;
+      } else {
+        newIfDefinedStrings = newIfDefinedString.split("\\|\\|");
+        ifDefinedMode = IfDefinedMode.OR_ANY;
+      }
+    }
+    for (String newDefinedString : newIfDefinedStrings) {
+      newIfDefinedSet.add(newDefinedString);
+    }
+    return ifDefinedMode;
+  }

-  private int doIfDefinedStatement(String[] lines, int index, Set<String> definedSet,
-      boolean outerInclude, StringBuilder sb) {
-    String ifLine = lines[index];
+  private int doIfDefinedStatement(List<String> linesList, int index, Set<String> definedSet,
+      boolean outerInclude, List<String> ifDefinedEvaluatedLinesList) {
+    String ifLine = linesList.get(index);
     final int ifLineNumber = index + 1;
-    String commaDefinedString = ifLine.substring("#IF ".length());
-    boolean includeBody = containsDefinedStrings(definedSet, commaDefinedString);
+    String newIfDefinedString = ifLine.substring("#IF ".length());
+    Set<String> newIfDefinedSet = new HashSet<String>();
+    IfDefinedMode ifDefinedMode = parseIfDefinedMode(newIfDefinedString, newIfDefinedSet);
+    boolean includeBody = matchesDefinedStrings(definedSet, newIfDefinedSet, ifDefinedMode);
     index++;
-    final int end = lines.length;
+    final int end = linesList.size();
     while (true) {
       if (index >= end) {
-        throw new RuntimeException("Unmatched #IF at line " + index + " for " + commaDefinedString);
+        throw new RuntimeException("Unmatched #IF at line " + index + " for " + newIfDefinedString);
       }
-      String line = lines[index];
+      String line = linesList.get(index);
       if (line.length() == 0 || line.charAt(0) != '#') {
         if (outerInclude && includeBody) {
-          sb.append(line);
-          sb.append("\n");
+          ifDefinedEvaluatedLinesList.add(line);
         }
         index++;
         continue;
       }
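
parseIfDefinedMode and matchesDefinedStrings together give #IF headers a small boolean language: a bare name (SINGLE), name&&name (AND_ALL, every name must be defined), or name||name (OR_ANY, any one suffices). A self-contained sketch of the resulting truth behavior, under the assumption that a header never mixes && and ||:

    import java.util.Arrays;
    import java.util.HashSet;
    import java.util.Set;

    // Sketch of the #IF header semantics: "A&&B" requires every name defined,
    // "A||B" requires at least one, and a bare name degenerates to the AND case.
    public class IfDefinedDemo {
      static boolean evaluate(String header, Set<String> defined) {
        if (header.contains("&&")) {
          return defined.containsAll(Arrays.asList(header.split("&&")));
        } else if (header.contains("||")) {
          for (String name : header.split("\\|\\|")) {
            if (defined.contains(name)) {
              return true;
            }
          }
          return false;
        }
        return defined.contains(header);   // SINGLE mode.
      }

      public static void main(String[] args) {
        Set<String> defined = new HashSet<>(Arrays.asList("LONG_KEY"));
        System.out.println(evaluate("LONG_KEY", defined));             // true
        System.out.println(evaluate("LONG_KEY&&STRING_KEY", defined)); // false
        System.out.println(evaluate("LONG_KEY||STRING_KEY", defined)); // true
      }
    }
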
@@ -3529,7 +3660,9 @@ private int doIfDefinedStatement(String[] lines, int index, Set definedS
       // A pound # statement (IF/ELSE/ENDIF).
       if (line.startsWith("#IF ")) {
         // Recurse.
-        index = doIfDefinedStatement(lines, index, definedSet, outerInclude && includeBody, sb);
+        index =
+            doIfDefinedStatement(
+                linesList, index, definedSet, outerInclude && includeBody, ifDefinedEvaluatedLinesList);
       } else if (line.equals("#ELSE")) {
         // Flip inclusion.
         includeBody = !includeBody;
@@ -3538,10 +3671,10 @@ private int doIfDefinedStatement(String[] lines, int index, Set definedS
         throw new RuntimeException("Missing defined strings with #ENDIF on line " + (index + 1));
       } else if (line.startsWith("#ENDIF ")) {
         String endCommaDefinedString = line.substring("#ENDIF ".length());
-        if (!commaDefinedString.equals(endCommaDefinedString)) {
+        if (!newIfDefinedString.equals(endCommaDefinedString)) {
           throw new RuntimeException(
-              "#ENDIF defined names \"" + endCommaDefinedString + "\" (line " + ifLineNumber +
-              " do not match \"" + commaDefinedString + "\" (line " + (index + 1) + ")");
+              "#ENDIF defined names \"" + endCommaDefinedString + "\" (line " + (index + 1) +
+              ") do not match \"" + newIfDefinedString + "\" (line " + ifLineNumber + ")");
         }
         return ++index;
       } else {
@@ -3550,44 +3683,213 @@ private int doIfDefinedStatement(String[] lines, int index, Set definedS
     }
   }

-  private void doEvaluateIfDefined(String[] lines, int index, Set<String> definedSet,
-      boolean outerInclude, StringBuilder sb) {
-    final int end = lines.length;
+  private void doEvaluateIfDefined(List<String> linesList, int index, Set<String> definedSet,
+      boolean outerInclude, List<String> ifDefinedEvaluatedLinesList) {
+    final int end = linesList.size();
     while (true) {
       if (index >= end) {
         break;
       }
-      String line = lines[index];
+      String line = linesList.get(index);
       if (line.length() == 0 || line.charAt(0) != '#') {
         if (outerInclude) {
-          sb.append(line);
-          sb.append("\n");
+          ifDefinedEvaluatedLinesList.add(line);
         }
         index++;
         continue;
       }
-      // A pound # statement (IF/ELSE/ENDIF).
       if (line.startsWith("#IF ")) {
-        index = doIfDefinedStatement(lines, index, definedSet, outerInclude, sb);
+
+        // A pound # statement (#IF #ELSE #ENDIF).
+        index =
+            doIfDefinedStatement(
+                linesList, index, definedSet, outerInclude, ifDefinedEvaluatedLinesList);
+      } else if (
+          !line.startsWith("#BEGIN_LINES ") &&
+          !line.startsWith("#END_LINES") &&
+          !line.startsWith("#USE_LINES ") &&
+          !line.startsWith("#COMMENT")) {
+        throw new RuntimeException(
+            "Problem with #IF #ELSE #ENDIF on line " + (index + 1) + ": " + line);
       } else {
-        throw new RuntimeException("Problem with #IF/#ELSE/#ENDIF on line " + (index + 1) + ": " + line);
+        if (outerInclude) {
+          ifDefinedEvaluatedLinesList.add(line);
+        }
+        index++;
       }
     }
   }
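
doIfDefinedStatement recurses for nested #IF blocks and flips includeBody at #ELSE, so a template line survives only when it is included at every enclosing level. A simplified, runnable sketch of that behavior on a toy template (it skips the #ENDIF name check and the unmatched-#IF guard that the real code performs):

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.HashSet;
    import java.util.List;
    import java.util.Set;

    // Simplified sketch of #IF/#ELSE/#ENDIF filtering with the same recursive
    // shape as doIfDefinedStatement; "include" carries the enclosing inclusion.
    public class IfElseEndifDemo {
      static int process(String[] lines, int i, Set<String> defined, boolean include,
          List<String> out) {
        String name = lines[i].substring("#IF ".length());
        boolean body = defined.contains(name);
        i++;
        while (!lines[i].startsWith("#ENDIF")) {
          if (lines[i].equals("#ELSE")) {
            body = !body;
          } else if (lines[i].startsWith("#IF ")) {
            i = process(lines, i, defined, include && body, out);
            continue;
          } else if (include && body) {
            out.add(lines[i]);
          }
          i++;
        }
        return i + 1;   // Skip past the #ENDIF line.
      }

      public static void main(String[] args) {
        String[] template = {
            "#IF LONG_KEY", "long key;", "#ELSE", "byte[] key;", "#ENDIF LONG_KEY"};
        List<String> out = new ArrayList<>();
        process(template, 0, new HashSet<>(Arrays.asList("LONG_KEY")), true, out);
        System.out.println(out);   // [long key;]
      }
    }
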
+
+  private void doUseLinesCollectAndFilter(List<String> linesList, Map<String, List<String>> useLinesMap,
+      List<String> filteredLinesList) {
+
+    int index = 0;
+    final int size = linesList.size();
+    while (true) {
+
+      if (index >= size) {
+        return;
+      }
+      String line = linesList.get(index);
+      if (line.startsWith("#BEGIN_LINES ")) {
+
+        final int beginLineIndex = index;
+        String linesTitle = line.substring("#BEGIN_LINES ".length());
+        if (useLinesMap.containsKey(linesTitle)) {
+          throw new RuntimeException(
+              "Problem #BEGIN_LINES that started at " + beginLineIndex +
+              " -- duplicate name " + linesTitle);
+        }
+        while (true) {
+          if (index >= size) {
+            throw new RuntimeException(
+                "Problem #BEGIN_LINES that started at " + beginLineIndex +
+                " -- no matching #END_LINES found");
+          }
+          line = linesList.get(index);
+          if (line.startsWith("#END_LINES")) {
+            useLinesMap.put(linesTitle, linesList.subList(beginLineIndex + 1, index));
+            break;
+          }
+          index++;
+        }
+      } else if (line.startsWith("#COMMENT")) {
+        // Filter out comment lines.
+      } else {
+        filteredLinesList.add(line);
+      }
+      index++;
+    }
+  }
+
+  private void doUseLinesApply(List<String> linesList, Map<String, List<String>> useLinesMap,
+      List<String> resultLinesList) {
+
+    int index = 0;
+    final int size = linesList.size();
+    while (true) {
+
+      if (index >= size) {
+        return;
+      }
+      String line = linesList.get(index);
+      if (line.startsWith("#USE_LINES ")) {
+
+        String linesTitle = line.substring("#USE_LINES ".length());
+        final int blankCharIndex = linesTitle.indexOf(" ");
+        int pad = 0;
+        if (blankCharIndex != -1) {
+          String remainder = linesTitle.substring(blankCharIndex + 1);
+          linesTitle = linesTitle.substring(0, blankCharIndex);
+          if (!remainder.startsWith("+")) {
+            throw new RuntimeException(
+                "Problem #USE_LINES that started at " + index +
+                " -- expecting + sign for indent");
+          }
+          String padString = remainder.substring(1);
+          pad = Integer.valueOf(padString);
+        }
+        List<String> useLines = useLinesMap.get(linesTitle);
+        if (useLines == null) {
+          throw new RuntimeException(
+              "Problem #USE_LINES that started at " + index +
+              " -- name " + linesTitle + " not found");
+        }
+        if (pad == 0) {
+          resultLinesList.addAll(useLines);
+        } else {
+          String padoutString = StringUtils.leftPad("", pad);
+          for (String useLine : useLines) {
+            resultLinesList.add(padoutString + useLine);
+          }
+        }
+      } else {
+        resultLinesList.add(line);
+      }
+      index++;
+    }
+  }
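
#BEGIN_LINES/#END_LINES capture a named block and #USE_LINES splices it back in, optionally re-indented via a trailing +N. A compact sketch of the splice; it pads with plain String operations instead of the StringUtils.leftPad call used above, so it has no commons-lang dependency:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    // Sketch of the #USE_LINES expansion step: named blocks collected earlier
    // replace "#USE_LINES <name> +<pad>" lines, left-padded with <pad> spaces.
    public class UseLinesDemo {
      public static void main(String[] args) {
        Map<String, List<String>> blocks = new HashMap<>();
        blocks.put("NULL_CHECK", Arrays.asList("if (isNull) {", "  continue;", "}"));

        List<String> out = new ArrayList<>();
        for (String line : Arrays.asList(
            "while (batch.next()) {", "#USE_LINES NULL_CHECK +2", "}")) {
          if (line.startsWith("#USE_LINES ")) {
            String[] parts = line.substring("#USE_LINES ".length()).split(" \\+");
            int pad = parts.length > 1 ? Integer.parseInt(parts[1]) : 0;
            String indent = new String(new char[pad]).replace('\0', ' ');
            for (String used : blocks.get(parts[0])) {
              out.add(indent + used);
            }
          } else {
            out.add(line);
          }
        }
        out.forEach(System.out::println);   // Prints the loop with the block indented by 2.
      }
    }
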
+
+  private void doIncludeProcessing(String[] lines, String templateDirectory,
+      List<String> resultList) throws IOException {
+
+    // Just one level.
+    int index = 0;
+    final int size = lines.length;
+    while (true) {
+
+      if (index >= size) {
+        return;
+      }
+      String line = lines[index];
+      if (line.startsWith("#INCLUDE ")) {
+        String includeFileName = line.substring("#INCLUDE ".length());
+        File includeFile =
+            new File(joinPath(templateDirectory, includeFileName + ".txt"));
+        String includeString = readFile(includeFile);
+        String[] includeLines = includeString.split("\n");
+        List<String> includeLinesList = Arrays.asList(includeLines);
+        resultList.addAll(includeLinesList);
+      } else {
+        resultList.add(line);
+      }
+      index++;
+    }
+  }
+
-  private String evaluateIfDefined(String linesString, List<String> definedList) {
+  private String evaluateIfDefined(String linesString, List<String> definedList,
+      String templateDirectory) throws IOException {
     String[] lines = linesString.split("\n");
     Set<String> definedSet = new HashSet<String>(definedList);
+    List<String> ifDefinedEvaluatedLinesList = new ArrayList<String>();
+
+    List<String> includedLinesList;
+    if (templateDirectory == null) {
+      includedLinesList = Arrays.asList(lines);
+    } else {
+      includedLinesList = new ArrayList<String>();
+      doIncludeProcessing(lines, templateDirectory, includedLinesList);
+    }
+
+    doEvaluateIfDefined(includedLinesList, 0, definedSet, true, ifDefinedEvaluatedLinesList);
+
+    Map<String, List<String>> useLinesMap = new HashMap<String, List<String>>();
+    List<String> filteredLinesList = new ArrayList<String>();
+    doUseLinesCollectAndFilter(ifDefinedEvaluatedLinesList, useLinesMap, filteredLinesList);
+
+    List<String> resultLinesList;
+    if (useLinesMap.isEmpty()) {
+      resultLinesList = filteredLinesList;
+    } else {
+      resultLinesList = new ArrayList<String>();
+      doUseLinesApply(filteredLinesList, useLinesMap, resultLinesList);
+    }
+
     StringBuilder sb = new StringBuilder();
-    doEvaluateIfDefined(lines, 0, definedSet, true, sb);
+    for (String line : resultLinesList) {
+      sb.append(line);
+      sb.append("\n");
+    }
     return sb.toString();
   }

-  private String evaluateIfDefined(String linesString, String definedString) {
+  private String evaluateIfDefined(String linesString, List<String> definedList)
+      throws IOException {
+    return evaluateIfDefined(linesString, definedList, null);
+  }
+
+  private String evaluateIfDefined(String linesString, String definedString)
+      throws IOException {
     return evaluateIfDefined(linesString, Arrays.asList(definedString.split(",")));
   }

+  private String evaluateIfDefined(String linesString, String definedString,
+      String templateDirectory) throws IOException {
+    return evaluateIfDefined(linesString, Arrays.asList(definedString.split(",")),
+        templateDirectory);
+  }
+
   static void writeFile(long templateTime, String outputDir, String classesDir,
       String className, String str) throws IOException {
     File outputFile = new File(outputDir, className + ".java");
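
With the new overloads in place, evaluateIfDefined runs a four-stage pipeline: one-level #INCLUDE expansion, #IF/#ELSE/#ENDIF filtering, #BEGIN_LINES collection plus #COMMENT removal, and finally #USE_LINES splicing. A sketch of that ordering with stand-in stage bodies; the real stages are the do* methods above, and only the sequencing is the point here:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    // Sketch of the stage ordering in evaluateIfDefined; each stage consumes the
    // previous stage's line list, matching the patch's intermediate-list style.
    public class TemplatePipelineDemo {
      // Stand-in stages: the real ones are doIncludeProcessing, doEvaluateIfDefined,
      // doUseLinesCollectAndFilter, and doUseLinesApply in GenVectorCode.
      static List<String> includes(List<String> in) { return in; }
      static List<String> ifDefined(List<String> in) { return in; }
      static List<String> collectAndFilter(List<String> in, Map<String, List<String>> blocks) {
        return in;
      }
      static List<String> useLines(List<String> in, Map<String, List<String>> blocks) {
        return in;
      }

      static String evaluate(String template) {
        List<String> lines = Arrays.asList(template.split("\n"));
        Map<String, List<String>> blocks = new HashMap<>();
        List<String> result =
            useLines(collectAndFilter(ifDefined(includes(lines)), blocks), blocks);
        StringBuilder sb = new StringBuilder();
        for (String line : result) {
          sb.append(line).append('\n');
        }
        return sb.toString();
      }

      public static void main(String[] args) {
        System.out.print(evaluate("class Generated {\n}"));
      }
    }
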