diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorageApps.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorageApps.java index 1aff2e0..683e11c 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorageApps.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorageApps.java @@ -653,7 +653,7 @@ public void testReadApps() throws Exception { new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), null), - new TimelineEntityFilters(), + new TimelineEntityFilters.Builder().build(), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null)); assertEquals(3, entities.size()); int cfgCnt = 0; @@ -690,8 +690,8 @@ public void testFilterAppsByCreatedTime() throws Exception { new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), null), - new TimelineEntityFilters(null, 1425016502000L, 1425016502040L, null, - null, null, null, null, null), + new TimelineEntityFilters.Builder().createdTimeBegin(1425016502000L) + .createdTimeEnd(1425016502040L).build(), new TimelineDataToRetrieve()); assertEquals(3, entities.size()); for (TimelineEntity entity : entities) { @@ -707,8 +707,8 @@ public void testFilterAppsByCreatedTime() throws Exception { new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), null), - new TimelineEntityFilters(null, 1425016502015L, null, null, null, null, - null, null, null), + new TimelineEntityFilters.Builder().createdTimeBegin(1425016502015L) + .build(), new TimelineDataToRetrieve()); assertEquals(2, entities.size()); for (TimelineEntity entity : entities) { @@ -722,8 +722,8 @@ public void testFilterAppsByCreatedTime() throws Exception { new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), null), - new TimelineEntityFilters(null, null, 1425016502015L, null, null, null, - null, null, null), + new TimelineEntityFilters.Builder().createdTimeEnd(1425016502015L) + .build(), new TimelineDataToRetrieve()); assertEquals(1, entities.size()); for (TimelineEntity entity : entities) { @@ -749,7 +749,7 @@ public void testReadAppsDefaultView() throws Exception { new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), null), - new TimelineEntityFilters(), + new TimelineEntityFilters.Builder().build(), new TimelineDataToRetrieve()); assertEquals(3, es1.size()); for (TimelineEntity e : es1) { @@ -774,7 +774,7 @@ public void testReadAppsByFields() throws Exception { new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), null), - new TimelineEntityFilters(), + new TimelineEntityFilters.Builder().build(), new TimelineDataToRetrieve( null, null, 
EnumSet.of(Field.IS_RELATED_TO, Field.METRICS), null)); assertEquals(3, es1.size()); @@ -804,8 +804,7 @@ public void testReadAppsIsRelatedTo() throws Exception { new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), null), - new TimelineEntityFilters(null, null, null, null, irt, null, null, null, - null), + new TimelineEntityFilters.Builder().isRelatedTo(irt).build(), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null)); assertEquals(2, entities.size()); int isRelatedToCnt = 0; @@ -830,8 +829,7 @@ public void testReadAppsIsRelatedTo() throws Exception { new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), null), - new TimelineEntityFilters(null, null, null, null, irt1, null, null, - null, null), + new TimelineEntityFilters.Builder().isRelatedTo(irt1).build(), new TimelineDataToRetrieve()); assertEquals(1, entities.size()); isRelatedToCnt = 0; @@ -854,8 +852,7 @@ public void testReadAppsIsRelatedTo() throws Exception { new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), null), - new TimelineEntityFilters(null, null, null, null, irt2, null, null, - null, null), + new TimelineEntityFilters.Builder().isRelatedTo(irt2).build(), new TimelineDataToRetrieve()); assertEquals(2, entities.size()); isRelatedToCnt = 0; @@ -877,8 +874,7 @@ public void testReadAppsIsRelatedTo() throws Exception { new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), null), - new TimelineEntityFilters(null, null, null, null, irt3, null, null, - null, null), + new TimelineEntityFilters.Builder().isRelatedTo(irt3).build(), new TimelineDataToRetrieve()); assertEquals(1, entities.size()); isRelatedToCnt = 0; @@ -901,8 +897,7 @@ public void testReadAppsIsRelatedTo() throws Exception { new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), null), - new TimelineEntityFilters(null, null, null, null, irt4, null, null, - null, null), + new TimelineEntityFilters.Builder().isRelatedTo(irt4).build(), new TimelineDataToRetrieve()); assertEquals(0, entities.size()); @@ -914,8 +909,7 @@ public void testReadAppsIsRelatedTo() throws Exception { new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), null), - new TimelineEntityFilters(null, null, null, null, irt5, null, null, - null, null), + new TimelineEntityFilters.Builder().isRelatedTo(irt5).build(), new TimelineDataToRetrieve()); assertEquals(0, entities.size()); @@ -935,8 +929,7 @@ public void testReadAppsIsRelatedTo() throws Exception { new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), null), - new TimelineEntityFilters(null, null, null, null, irt6, null, null, - null, null), + new TimelineEntityFilters.Builder().isRelatedTo(irt6).build(), new TimelineDataToRetrieve()); assertEquals(1, entities.size()); isRelatedToCnt = 0; @@ -963,8 +956,7 @@ public void testReadAppsRelatesTo() throws Exception { new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), null), - new TimelineEntityFilters(null, null, null, rt, null, null, null, null, 
- null), + new TimelineEntityFilters.Builder().relatesTo(rt).build(), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null)); assertEquals(2, entities.size()); int relatesToCnt = 0; @@ -989,8 +981,7 @@ public void testReadAppsRelatesTo() throws Exception { new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), null), - new TimelineEntityFilters(null, null, null, rt1, null, null, null, null, - null), + new TimelineEntityFilters.Builder().relatesTo(rt1).build(), new TimelineDataToRetrieve()); assertEquals(1, entities.size()); relatesToCnt = 0; @@ -1013,8 +1004,7 @@ public void testReadAppsRelatesTo() throws Exception { new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), null), - new TimelineEntityFilters(null, null, null, rt2, null, null, null, null, - null), + new TimelineEntityFilters.Builder().relatesTo(rt2).build(), new TimelineDataToRetrieve()); assertEquals(2, entities.size()); relatesToCnt = 0; @@ -1036,8 +1026,7 @@ public void testReadAppsRelatesTo() throws Exception { new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), null), - new TimelineEntityFilters(null, null, null, rt3, null, null, null, null, - null), + new TimelineEntityFilters.Builder().relatesTo(rt3).build(), new TimelineDataToRetrieve()); assertEquals(1, entities.size()); relatesToCnt = 0; @@ -1060,8 +1049,7 @@ public void testReadAppsRelatesTo() throws Exception { new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), null), - new TimelineEntityFilters(null, null, null, rt4, null, null, null, null, - null), + new TimelineEntityFilters.Builder().relatesTo(rt4).build(), new TimelineDataToRetrieve()); assertEquals(0, entities.size()); @@ -1073,8 +1061,7 @@ public void testReadAppsRelatesTo() throws Exception { new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), null), - new TimelineEntityFilters(null, null, null, rt5, null, null, null, null, - null), + new TimelineEntityFilters.Builder().relatesTo(rt5).build(), new TimelineDataToRetrieve()); assertEquals(0, entities.size()); @@ -1094,8 +1081,7 @@ public void testReadAppsRelatesTo() throws Exception { new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), null), - new TimelineEntityFilters(null, null, null, rt6, null, null, null, null, - null), + new TimelineEntityFilters.Builder().relatesTo(rt6).build(), new TimelineDataToRetrieve()); assertEquals(1, entities.size()); relatesToCnt = 0; @@ -1131,8 +1117,7 @@ public void testReadAppsRelatesTo() throws Exception { new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), null), - new TimelineEntityFilters(null, null, null, rt7, null, null, null, null, - null), + new TimelineEntityFilters.Builder().relatesTo(rt7).build(), new TimelineDataToRetrieve()); assertEquals(1, entities.size()); relatesToCnt = 0; @@ -1169,8 +1154,8 @@ public void testReadAppsRelationsAndEventFiltersDefaultView() new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), null), - new TimelineEntityFilters(null, 
null, null, relatesTo, isRelatedTo, - null, null, null, eventFilter), + new TimelineEntityFilters.Builder().relatesTo(relatesTo) + .isRelatedTo(isRelatedTo).eventFilters(eventFilter).build(), new TimelineDataToRetrieve()); assertEquals(1, entities.size()); int eventCnt = 0; @@ -1207,8 +1192,8 @@ public void testReadAppsConfigFilters() throws Exception { new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), null), - new TimelineEntityFilters(null, null, null, null, null, null, - confFilterList, null, null), + new TimelineEntityFilters.Builder().configFilter(confFilterList) + .build(), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.CONFIGS), null)); assertEquals(2, entities.size()); @@ -1222,8 +1207,8 @@ public void testReadAppsConfigFilters() throws Exception { new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), null), - new TimelineEntityFilters(null, null, null, null, null, null, - confFilterList, null, null), + new TimelineEntityFilters.Builder().configFilter(confFilterList) + .build(), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null)); assertEquals(2, entities.size()); cfgCnt = 0; @@ -1239,8 +1224,8 @@ public void testReadAppsConfigFilters() throws Exception { new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), null), - new TimelineEntityFilters(null, null, null, null, null, null, - confFilterList1, null, null), + new TimelineEntityFilters.Builder().configFilter(confFilterList1) + .build(), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.CONFIGS), null)); assertEquals(1, entities.size()); @@ -1259,8 +1244,8 @@ public void testReadAppsConfigFilters() throws Exception { new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), null), - new TimelineEntityFilters(null, null, null, null, null, null, - confFilterList2, null, null), + new TimelineEntityFilters.Builder().configFilter(confFilterList2) + .build(), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.CONFIGS), null)); assertEquals(0, entities.size()); @@ -1272,8 +1257,8 @@ public void testReadAppsConfigFilters() throws Exception { new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), null), - new TimelineEntityFilters(null, null, null, null, null, null, - confFilterList3, null, null), + new TimelineEntityFilters.Builder().configFilter(confFilterList3) + .build(), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.CONFIGS), null)); assertEquals(0, entities.size()); @@ -1285,8 +1270,8 @@ public void testReadAppsConfigFilters() throws Exception { new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), null), - new TimelineEntityFilters(null, null, null, null, null, null, - confFilterList4, null, null), + new TimelineEntityFilters.Builder().configFilter(confFilterList4) + .build(), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.CONFIGS), null)); assertEquals(0, entities.size()); @@ -1298,8 +1283,8 @@ public void testReadAppsConfigFilters() throws Exception { new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), null), - new 
TimelineEntityFilters(null, null, null, null, null, null, - confFilterList5, null, null), + new TimelineEntityFilters.Builder().configFilter(confFilterList5) + .build(), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.CONFIGS), null)); assertEquals(3, entities.size()); @@ -1316,8 +1301,7 @@ public void testReadAppsEventFilters() throws Exception { new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), null), - new TimelineEntityFilters(null, null, null, null, null, null, null, - null, ef), + new TimelineEntityFilters.Builder().eventFilters(ef).build(), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null)); assertEquals(1, entities.size()); int eventCnt = 0; @@ -1338,8 +1322,8 @@ public void testReadAppsEventFilters() throws Exception { new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), null), - new TimelineEntityFilters(null, null, null, null, null, null, null, - null, ef1), new TimelineDataToRetrieve()); + new TimelineEntityFilters.Builder().eventFilters(ef1).build(), + new TimelineDataToRetrieve()); assertEquals(1, entities.size()); eventCnt = 0; for (TimelineEntity timelineEntity : entities) { @@ -1357,8 +1341,7 @@ public void testReadAppsEventFilters() throws Exception { new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), null), - new TimelineEntityFilters(null, null, null, null, null, null, null, - null, ef2), + new TimelineEntityFilters.Builder().eventFilters(ef2).build(), new TimelineDataToRetrieve()); assertEquals(2, entities.size()); eventCnt = 0; @@ -1381,8 +1364,7 @@ public void testReadAppsEventFilters() throws Exception { new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), null), - new TimelineEntityFilters(null, null, null, null, null, null, null, - null, ef3), + new TimelineEntityFilters.Builder().eventFilters(ef3).build(), new TimelineDataToRetrieve()); assertEquals(0, entities.size()); @@ -1399,8 +1381,7 @@ public void testReadAppsEventFilters() throws Exception { new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), null), - new TimelineEntityFilters(null, null, null, null, null, null, null, - null, ef4), + new TimelineEntityFilters.Builder().eventFilters(ef4).build(), new TimelineDataToRetrieve()); assertEquals(1, entities.size()); eventCnt = 0; @@ -1421,8 +1402,7 @@ public void testReadAppsEventFilters() throws Exception { new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), null), - new TimelineEntityFilters(null, null, null, null, null, null, null, - null, ef5), + new TimelineEntityFilters.Builder().eventFilters(ef5).build(), new TimelineDataToRetrieve()); assertEquals(1, entities.size()); eventCnt = 0; @@ -1451,7 +1431,7 @@ public void testReadAppsConfigPrefix() throws Exception { new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), null) , - new TimelineEntityFilters(), + new TimelineEntityFilters.Builder().build(), new TimelineDataToRetrieve(list, null, null, null)); int cfgCnt = 0; for (TimelineEntity entity : es1) { @@ -1476,8 +1456,8 @@ public void 
testReadAppsConfigFilterPrefix() throws Exception { new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), null), - new TimelineEntityFilters(null, null, null, null, null, null, - confFilterList, null, null), + new TimelineEntityFilters.Builder().configFilter(confFilterList) + .build(), new TimelineDataToRetrieve(list, null, null, null)); assertEquals(1, entities.size()); int cfgCnt = 0; @@ -1509,8 +1489,8 @@ public void testReadAppsConfigFilterPrefix() throws Exception { new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), null), - new TimelineEntityFilters(null, null, null, null, null, null, - confFilterList1, null, null), + new TimelineEntityFilters.Builder().configFilter(confFilterList1) + .build(), new TimelineDataToRetrieve(confsToRetrieve, null, null, null)); assertEquals(2, entities.size()); cfgCnt = 0; @@ -1540,8 +1520,8 @@ public void testReadAppsMetricFilters() throws Exception { new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), null), - new TimelineEntityFilters(null, null, null, null, null, null, null, - metricFilterList, null), + new TimelineEntityFilters.Builder().metricFilters(metricFilterList) + .build(), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS), null)); assertEquals(2, entities.size()); @@ -1555,8 +1535,8 @@ public void testReadAppsMetricFilters() throws Exception { new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), null), - new TimelineEntityFilters(null, null, null, null, null, null, null, - metricFilterList, null), + new TimelineEntityFilters.Builder().metricFilters(metricFilterList) + .build(), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null)); assertEquals(2, entities.size()); metricCnt = 0; @@ -1574,8 +1554,8 @@ public void testReadAppsMetricFilters() throws Exception { new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), null), - new TimelineEntityFilters(null, null, null, null, null, null, null, - metricFilterList1, null), + new TimelineEntityFilters.Builder().metricFilters(metricFilterList1) + .build(), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS), null)); assertEquals(1, entities.size()); @@ -1594,8 +1574,8 @@ public void testReadAppsMetricFilters() throws Exception { new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), null), - new TimelineEntityFilters(null, null, null, null, null, null, null, - metricFilterList2, null), + new TimelineEntityFilters.Builder().metricFilters(metricFilterList2) + .build(), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS), null)); assertEquals(0, entities.size()); @@ -1607,8 +1587,8 @@ public void testReadAppsMetricFilters() throws Exception { new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), null), - new TimelineEntityFilters(null, null, null, null, null, null, null, - metricFilterList3, null), + new TimelineEntityFilters.Builder().metricFilters(metricFilterList3) + .build(), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS), null)); assertEquals(0, entities.size()); @@ 
-1620,8 +1600,8 @@ public void testReadAppsMetricFilters() throws Exception { new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), null), - new TimelineEntityFilters(null, null, null, null, null, null, null, - metricFilterList4, null), + new TimelineEntityFilters.Builder().metricFilters(metricFilterList4) + .build(), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS), null)); assertEquals(0, entities.size()); @@ -1633,8 +1613,8 @@ public void testReadAppsMetricFilters() throws Exception { new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), null), - new TimelineEntityFilters(null, null, null, null, null, null, null, - metricFilterList5, null), + new TimelineEntityFilters.Builder().metricFilters(metricFilterList5) + .build(), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS), null)); assertEquals(3, entities.size()); @@ -1656,7 +1636,7 @@ public void testReadAppsMetricPrefix() throws Exception { new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), null), - new TimelineEntityFilters(), + new TimelineEntityFilters.Builder().build(), new TimelineDataToRetrieve(null, list, null, null)); int metricCnt = 0; for (TimelineEntity entity : es1) { @@ -1681,8 +1661,8 @@ public void testReadAppsMetricFilterPrefix() throws Exception { new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), null), - new TimelineEntityFilters(null, null, null, null, null, null, null, - metricFilterList, null), + new TimelineEntityFilters.Builder().metricFilters(metricFilterList) + .build(), new TimelineDataToRetrieve(null, list, null, null)); int metricCnt = 0; assertEquals(1, entities.size()); @@ -1707,8 +1687,8 @@ public void testReadAppsMetricFilterPrefix() throws Exception { new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), null), - new TimelineEntityFilters(null, null, null, null, null, null, null, - metricFilterList1, null), + new TimelineEntityFilters.Builder().metricFilters(metricFilterList1) + .build(), new TimelineDataToRetrieve(null, metricsToRetrieve, null, null)); metricCnt = 0; assertEquals(2, entities.size()); @@ -1724,8 +1704,9 @@ public void testReadAppsMetricFilterPrefix() throws Exception { entities = reader.getEntities(new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), null), - new TimelineEntityFilters(null, null, null, null, null, null, null, - metricFilterList1, null), new TimelineDataToRetrieve(null, + new TimelineEntityFilters.Builder().metricFilters(metricFilterList1) + .build(), + new TimelineDataToRetrieve(null, metricsToRetrieve, EnumSet.of(Field.METRICS), Integer.MAX_VALUE)); metricCnt = 0; int metricValCnt = 0; @@ -1760,8 +1741,7 @@ public void testReadAppsInfoFilters() throws Exception { new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), null), - new TimelineEntityFilters(null, null, null, null, null, infoFilterList, - null, null, null), + new TimelineEntityFilters.Builder().infoFilters(infoFilterList).build(), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.INFO), null)); assertEquals(2, 
entities.size()); int infoCnt = 0; @@ -1777,8 +1757,8 @@ public void testReadAppsInfoFilters() throws Exception { new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), null), - new TimelineEntityFilters(null, null, null, null, null, infoFilterList1, - null, null, null), + new TimelineEntityFilters.Builder().infoFilters(infoFilterList1) + .build(), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.INFO), null)); assertEquals(1, entities.size()); infoCnt = 0; @@ -1796,8 +1776,8 @@ public void testReadAppsInfoFilters() throws Exception { new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), null), - new TimelineEntityFilters(null, null, null, null, null, infoFilterList2, - null, null, null), + new TimelineEntityFilters.Builder().infoFilters(infoFilterList2) + .build(), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.INFO), null)); assertEquals(0, entities.size()); @@ -1808,8 +1788,8 @@ public void testReadAppsInfoFilters() throws Exception { new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), null), - new TimelineEntityFilters(null, null, null, null, null, infoFilterList3, - null, null, null), + new TimelineEntityFilters.Builder().infoFilters(infoFilterList3) + .build(), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.INFO), null)); assertEquals(0, entities.size()); @@ -1820,8 +1800,8 @@ public void testReadAppsInfoFilters() throws Exception { new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), null), - new TimelineEntityFilters(null, null, null, null, null, infoFilterList4, - null, null, null), + new TimelineEntityFilters.Builder().infoFilters(infoFilterList4) + .build(), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.INFO), null)); assertEquals(0, entities.size()); @@ -1832,8 +1812,8 @@ public void testReadAppsInfoFilters() throws Exception { new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), null), - new TimelineEntityFilters(null, null, null, null, null, infoFilterList5, - null, null, null), + new TimelineEntityFilters.Builder().infoFilters(infoFilterList5) + .build(), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.INFO), null)); assertEquals(3, entities.size()); } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorageEntities.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorageEntities.java index c731631..4cd074b 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorageEntities.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorageEntities.java @@ -298,7 +298,7 @@ public void testWriteEntityToHBase() throws Exception { Set es1 = reader.getEntities( new 
TimelineReaderContext(cluster, user, flow, runid, appName, entity.getType(), null), - new TimelineEntityFilters(), + new TimelineEntityFilters.Builder().build(), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), Integer.MAX_VALUE)); assertNotNull(e1); @@ -448,7 +448,7 @@ public void testEventsWithEmptyInfo() throws IOException { Set es1 = reader.getEntities( new TimelineReaderContext(cluster, user, flow, runid, appName, entity.getType(), null), - new TimelineEntityFilters(), + new TimelineEntityFilters.Builder().build(), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null)); assertNotNull(e1); assertEquals(1, es1.size()); @@ -544,7 +544,7 @@ public void testReadEntities() throws Exception { Set entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, "application_1231111111_1111", "world", - null), new TimelineEntityFilters(), + null), new TimelineEntityFilters.Builder().build(), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null)); assertEquals(3, entities.size()); int cfgCnt = 0; @@ -580,8 +580,9 @@ public void testFilterEntitiesByCreatedTime() throws Exception { Set entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, "application_1231111111_1111", "world", null), - new TimelineEntityFilters(null, 1425016502000L, 1425016502040L, null, - null, null, null, null, null), new TimelineDataToRetrieve()); + new TimelineEntityFilters.Builder().createdTimeBegin(1425016502000L) + .createdTimeEnd(1425016502040L).build(), + new TimelineDataToRetrieve()); assertEquals(3, entities.size()); for (TimelineEntity entity : entities) { if (!entity.getId().equals("hello") && !entity.getId().equals("hello1") && @@ -593,8 +594,9 @@ public void testFilterEntitiesByCreatedTime() throws Exception { entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, "application_1231111111_1111", "world", null), - new TimelineEntityFilters(null, 1425016502015L, null, null, null, null, - null, null, null), new TimelineDataToRetrieve()); + new TimelineEntityFilters.Builder().createdTimeBegin(1425016502015L) + .build(), + new TimelineDataToRetrieve()); assertEquals(2, entities.size()); for (TimelineEntity entity : entities) { if (!entity.getId().equals("hello1") && @@ -605,8 +607,9 @@ public void testFilterEntitiesByCreatedTime() throws Exception { entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, "application_1231111111_1111", "world", null), - new TimelineEntityFilters(null, null, 1425016502015L, null, null, null, - null, null, null), new TimelineDataToRetrieve()); + new TimelineEntityFilters.Builder().createdTimeEnd(1425016502015L) + .build(), + new TimelineDataToRetrieve()); assertEquals(1, entities.size()); for (TimelineEntity entity : entities) { if (!entity.getId().equals("hello")) { @@ -638,8 +641,9 @@ public void testReadEntitiesRelationsAndEventFiltersDefaultView() Set entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, "application_1231111111_1111", "world", null), - new TimelineEntityFilters(null, null, null, relatesTo, isRelatedTo, - null, null, null, eventFilter), new TimelineDataToRetrieve()); + new TimelineEntityFilters.Builder().relatesTo(relatesTo) + .isRelatedTo(isRelatedTo).eventFilters(eventFilter).build(), + new TimelineDataToRetrieve()); assertEquals(1, entities.size()); int 
eventCnt = 0; int isRelatedToCnt = 0; @@ -667,8 +671,7 @@ public void testReadEntitiesEventFilters() throws Exception { Set entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, "application_1231111111_1111", "world", null), - new TimelineEntityFilters(null, null, null, null, null, null, null, - null, ef), + new TimelineEntityFilters.Builder().eventFilters(ef).build(), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null)); assertEquals(1, entities.size()); int eventCnt = 0; @@ -688,8 +691,7 @@ public void testReadEntitiesEventFilters() throws Exception { entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, "application_1231111111_1111", "world", null), - new TimelineEntityFilters(null, null, null, null, null, null, null, - null, ef1), + new TimelineEntityFilters.Builder().eventFilters(ef1).build(), new TimelineDataToRetrieve()); assertEquals(1, entities.size()); eventCnt = 0; @@ -707,8 +709,7 @@ public void testReadEntitiesEventFilters() throws Exception { entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, "application_1231111111_1111", "world", null), - new TimelineEntityFilters(null, null, null, null, null, null, null, - null, ef2), + new TimelineEntityFilters.Builder().eventFilters(ef2).build(), new TimelineDataToRetrieve()); assertEquals(2, entities.size()); eventCnt = 0; @@ -729,8 +730,7 @@ public void testReadEntitiesEventFilters() throws Exception { entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, "application_1231111111_1111", "world", null), - new TimelineEntityFilters(null, null, null, null, null, null, null, - null, ef3), + new TimelineEntityFilters.Builder().eventFilters(ef3).build(), new TimelineDataToRetrieve()); assertEquals(0, entities.size()); @@ -746,8 +746,7 @@ public void testReadEntitiesEventFilters() throws Exception { entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, "application_1231111111_1111", "world", null), - new TimelineEntityFilters(null, null, null, null, null, null, null, - null, ef4), + new TimelineEntityFilters.Builder().eventFilters(ef4).build(), new TimelineDataToRetrieve()); assertEquals(1, entities.size()); eventCnt = 0; @@ -767,8 +766,7 @@ public void testReadEntitiesEventFilters() throws Exception { entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, "application_1231111111_1111", "world", null), - new TimelineEntityFilters(null, null, null, null, null, null, null, - null, ef5), + new TimelineEntityFilters.Builder().eventFilters(ef5).build(), new TimelineDataToRetrieve()); assertEquals(1, entities.size()); eventCnt = 0; @@ -793,8 +791,7 @@ public void testReadEntitiesIsRelatedTo() throws Exception { Set entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, "application_1231111111_1111", "world", null), - new TimelineEntityFilters(null, null, null, null, irt, null, null, null, - null), + new TimelineEntityFilters.Builder().isRelatedTo(irt).build(), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null)); assertEquals(2, entities.size()); int isRelatedToCnt = 0; @@ -817,8 +814,7 @@ public void testReadEntitiesIsRelatedTo() throws Exception { entities = reader.getEntities( new TimelineReaderContext("cluster1", 
"user1", "some_flow_name", 1002345678919L, "application_1231111111_1111", "world", null), - new TimelineEntityFilters(null, null, null, null, irt1, null, null, - null, null), + new TimelineEntityFilters.Builder().isRelatedTo(irt1).build(), new TimelineDataToRetrieve()); assertEquals(1, entities.size()); isRelatedToCnt = 0; @@ -840,8 +836,7 @@ public void testReadEntitiesIsRelatedTo() throws Exception { entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, "application_1231111111_1111", "world", null), - new TimelineEntityFilters(null, null, null, null, irt2, null, null, - null, null), + new TimelineEntityFilters.Builder().isRelatedTo(irt2).build(), new TimelineDataToRetrieve()); assertEquals(2, entities.size()); isRelatedToCnt = 0; @@ -861,8 +856,7 @@ public void testReadEntitiesIsRelatedTo() throws Exception { entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, "application_1231111111_1111", "world", null), - new TimelineEntityFilters(null, null, null, null, irt3, null, null, - null, null), + new TimelineEntityFilters.Builder().isRelatedTo(irt3).build(), new TimelineDataToRetrieve()); assertEquals(1, entities.size()); isRelatedToCnt = 0; @@ -884,8 +878,7 @@ public void testReadEntitiesIsRelatedTo() throws Exception { entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, "application_1231111111_1111", "world", null), - new TimelineEntityFilters(null, null, null, null, irt4, null, null, - null, null), + new TimelineEntityFilters.Builder().isRelatedTo(irt4).build(), new TimelineDataToRetrieve()); assertEquals(0, entities.size()); @@ -896,8 +889,7 @@ public void testReadEntitiesIsRelatedTo() throws Exception { entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, "application_1231111111_1111", "world", null), - new TimelineEntityFilters(null, null, null, null, irt5, null, null, - null, null), + new TimelineEntityFilters.Builder().isRelatedTo(irt5).build(), new TimelineDataToRetrieve()); assertEquals(0, entities.size()); @@ -916,8 +908,7 @@ public void testReadEntitiesIsRelatedTo() throws Exception { entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, "application_1231111111_1111", "world", null), - new TimelineEntityFilters(null, null, null, null, irt6, null, null, - null, null), + new TimelineEntityFilters.Builder().isRelatedTo(irt6).build(), new TimelineDataToRetrieve()); assertEquals(1, entities.size()); isRelatedToCnt = 0; @@ -942,8 +933,7 @@ public void testReadEntitiesRelatesTo() throws Exception { Set entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, "application_1231111111_1111", "world", null), - new TimelineEntityFilters(null, null, null, rt, null, null, null, null, - null), + new TimelineEntityFilters.Builder().relatesTo(rt).build(), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null)); assertEquals(2, entities.size()); int relatesToCnt = 0; @@ -966,8 +956,7 @@ public void testReadEntitiesRelatesTo() throws Exception { entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, "application_1231111111_1111", "world", null), - new TimelineEntityFilters(null, null, null, rt1, null, null, null, null, - null), + new 
TimelineEntityFilters.Builder().relatesTo(rt1).build(), new TimelineDataToRetrieve()); assertEquals(1, entities.size()); relatesToCnt = 0; @@ -989,8 +978,7 @@ public void testReadEntitiesRelatesTo() throws Exception { entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, "application_1231111111_1111", "world", null), - new TimelineEntityFilters(null, null, null, rt2, null, null, null, null, - null), + new TimelineEntityFilters.Builder().relatesTo(rt2).build(), new TimelineDataToRetrieve()); assertEquals(2, entities.size()); relatesToCnt = 0; @@ -1010,8 +998,7 @@ public void testReadEntitiesRelatesTo() throws Exception { entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, "application_1231111111_1111", "world", null), - new TimelineEntityFilters(null, null, null, rt3, null, null, null, null, - null), + new TimelineEntityFilters.Builder().relatesTo(rt3).build(), new TimelineDataToRetrieve()); assertEquals(1, entities.size()); relatesToCnt = 0; @@ -1033,8 +1020,7 @@ public void testReadEntitiesRelatesTo() throws Exception { entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, "application_1231111111_1111", "world", null), - new TimelineEntityFilters(null, null, null, rt4, null, null, null, null, - null), + new TimelineEntityFilters.Builder().relatesTo(rt4).build(), new TimelineDataToRetrieve()); assertEquals(0, entities.size()); @@ -1045,8 +1031,7 @@ public void testReadEntitiesRelatesTo() throws Exception { entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, "application_1231111111_1111", "world", null), - new TimelineEntityFilters(null, null, null, rt5, null, null, null, null, - null), + new TimelineEntityFilters.Builder().relatesTo(rt5).build(), new TimelineDataToRetrieve()); assertEquals(0, entities.size()); @@ -1065,8 +1050,7 @@ public void testReadEntitiesRelatesTo() throws Exception { entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, "application_1231111111_1111", "world", null), - new TimelineEntityFilters(null, null, null, rt6, null, null, null, null, - null), + new TimelineEntityFilters.Builder().relatesTo(rt6).build(), new TimelineDataToRetrieve()); assertEquals(1, entities.size()); relatesToCnt = 0; @@ -1101,8 +1085,7 @@ public void testReadEntitiesRelatesTo() throws Exception { entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, "application_1231111111_1111", "world", null), - new TimelineEntityFilters(null, null, null, rt7, null, null, null, null, - null), + new TimelineEntityFilters.Builder().relatesTo(rt7).build(), new TimelineDataToRetrieve()); assertEquals(1, entities.size()); relatesToCnt = 0; @@ -1128,7 +1111,7 @@ public void testReadEntitiesDefaultView() throws Exception { Set es1 = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, "application_1231111111_1111", "world", null), - new TimelineEntityFilters(), + new TimelineEntityFilters.Builder().build(), new TimelineDataToRetrieve()); assertEquals(3, es1.size()); for (TimelineEntity e : es1) { @@ -1151,7 +1134,7 @@ public void testReadEntitiesByFields() throws Exception { Set es1 = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, "application_1231111111_1111", "world", 
null), - new TimelineEntityFilters(), + new TimelineEntityFilters.Builder().build(), new TimelineDataToRetrieve( null, null, EnumSet.of(Field.IS_RELATED_TO, Field.METRICS), null)); assertEquals(3, es1.size()); @@ -1182,7 +1165,7 @@ public void testReadEntitiesConfigPrefix() throws Exception { Set es1 = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, "application_1231111111_1111", "world", null), - new TimelineEntityFilters(), + new TimelineEntityFilters.Builder().build(), new TimelineDataToRetrieve(list, null, null, null)); int cfgCnt = 0; for (TimelineEntity entity : es1) { @@ -1212,8 +1195,8 @@ public void testReadEntitiesConfigFilters() throws Exception { Set entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, "application_1231111111_1111", "world", null), - new TimelineEntityFilters(null, null, null, null, null, null, - confFilterList, null, null), + new TimelineEntityFilters.Builder().configFilter(confFilterList) + .build(), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.CONFIGS), null)); assertEquals(2, entities.size()); @@ -1226,8 +1209,8 @@ public void testReadEntitiesConfigFilters() throws Exception { entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, "application_1231111111_1111", "world", null), - new TimelineEntityFilters(null, null, null, null, null, null, - confFilterList, null, null), + new TimelineEntityFilters.Builder().configFilter(confFilterList) + .build(), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null)); assertEquals(2, entities.size()); cfgCnt = 0; @@ -1242,8 +1225,8 @@ public void testReadEntitiesConfigFilters() throws Exception { entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, "application_1231111111_1111", "world", null), - new TimelineEntityFilters(null, null, null, null, null, null, - confFilterList1, null, null), + new TimelineEntityFilters.Builder().configFilter(confFilterList1) + .build(), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.CONFIGS), null)); assertEquals(1, entities.size()); @@ -1261,8 +1244,8 @@ public void testReadEntitiesConfigFilters() throws Exception { entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, "application_1231111111_1111", "world", null), - new TimelineEntityFilters(null, null, null, null, null, null, - confFilterList2, null, null), + new TimelineEntityFilters.Builder().configFilter(confFilterList2) + .build(), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.CONFIGS), null)); assertEquals(0, entities.size()); @@ -1273,8 +1256,8 @@ public void testReadEntitiesConfigFilters() throws Exception { entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, "application_1231111111_1111", "world", null), - new TimelineEntityFilters(null, null, null, null, null, null, - confFilterList3, null, null), + new TimelineEntityFilters.Builder().configFilter(confFilterList3) + .build(), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.CONFIGS), null)); assertEquals(0, entities.size()); @@ -1285,8 +1268,8 @@ public void testReadEntitiesConfigFilters() throws Exception { entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, "application_1231111111_1111", "world", null), - new 
TimelineEntityFilters(null, null, null, null, null, null, - confFilterList4, null, null), + new TimelineEntityFilters.Builder().configFilter(confFilterList4) + .build(), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.CONFIGS), null)); assertEquals(0, entities.size()); @@ -1297,8 +1280,8 @@ public void testReadEntitiesConfigFilters() throws Exception { entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, "application_1231111111_1111", "world", null), - new TimelineEntityFilters(null, null, null, null, null, null, - confFilterList5, null, null), + new TimelineEntityFilters.Builder().configFilter(confFilterList5) + .build(), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.CONFIGS), null)); assertEquals(3, entities.size()); @@ -1315,8 +1298,8 @@ public void testReadEntitiesConfigFilterPrefix() throws Exception { Set entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, "application_1231111111_1111", "world", null), - new TimelineEntityFilters(null, null, null, null, null, null, - confFilterList, null, null), + new TimelineEntityFilters.Builder().configFilter(confFilterList) + .build(), new TimelineDataToRetrieve(list, null, null, null)); assertEquals(1, entities.size()); int cfgCnt = 0; @@ -1346,8 +1329,8 @@ public void testReadEntitiesConfigFilterPrefix() throws Exception { entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, "application_1231111111_1111", "world", null), - new TimelineEntityFilters(null, null, null, null, null, null, - confFilterList1, null, null), + new TimelineEntityFilters.Builder().configFilter(confFilterList1) + .build(), new TimelineDataToRetrieve(confsToRetrieve, null, null, null)); assertEquals(2, entities.size()); cfgCnt = 0; @@ -1375,7 +1358,7 @@ public void testReadEntitiesMetricPrefix() throws Exception { Set es1 = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, "application_1231111111_1111", "world", null), - new TimelineEntityFilters(), + new TimelineEntityFilters.Builder().build(), new TimelineDataToRetrieve(null, list, null, null)); int metricCnt = 0; for (TimelineEntity entity : es1) { @@ -1403,8 +1386,8 @@ public void testReadEntitiesMetricFilters() throws Exception { Set entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, "application_1231111111_1111", "world", null), - new TimelineEntityFilters(null, null, null, null, null, null, null, - metricFilterList, null), + new TimelineEntityFilters.Builder().metricFilters(metricFilterList) + .build(), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS), null)); assertEquals(2, entities.size()); @@ -1417,8 +1400,8 @@ public void testReadEntitiesMetricFilters() throws Exception { entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, "application_1231111111_1111", "world", null), - new TimelineEntityFilters(null, null, null, null, null, null, null, - metricFilterList, null), + new TimelineEntityFilters.Builder().metricFilters(metricFilterList) + .build(), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null)); assertEquals(2, entities.size()); metricCnt = 0; @@ -1435,8 +1418,8 @@ public void testReadEntitiesMetricFilters() throws Exception { entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", 
"some_flow_name", 1002345678919L, "application_1231111111_1111", "world", null), - new TimelineEntityFilters(null, null, null, null, null, null, null, - metricFilterList1, null), + new TimelineEntityFilters.Builder().metricFilters(metricFilterList1) + .build(), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS), null)); assertEquals(1, entities.size()); @@ -1454,8 +1437,8 @@ public void testReadEntitiesMetricFilters() throws Exception { entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, "application_1231111111_1111", "world", null), - new TimelineEntityFilters(null, null, null, null, null, null, null, - metricFilterList2, null), + new TimelineEntityFilters.Builder().metricFilters(metricFilterList2) + .build(), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS), null)); assertEquals(0, entities.size()); @@ -1466,8 +1449,8 @@ public void testReadEntitiesMetricFilters() throws Exception { entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, "application_1231111111_1111", "world", null), - new TimelineEntityFilters(null, null, null, null, null, null, null, - metricFilterList3, null), + new TimelineEntityFilters.Builder().metricFilters(metricFilterList3) + .build(), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS), null)); assertEquals(0, entities.size()); @@ -1478,8 +1461,8 @@ public void testReadEntitiesMetricFilters() throws Exception { entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, "application_1231111111_1111", "world", null), - new TimelineEntityFilters(null, null, null, null, null, null, null, - metricFilterList4, null), + new TimelineEntityFilters.Builder().metricFilters(metricFilterList4) + .build(), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS), null)); assertEquals(0, entities.size()); @@ -1490,8 +1473,8 @@ public void testReadEntitiesMetricFilters() throws Exception { entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, "application_1231111111_1111", "world", null), - new TimelineEntityFilters(null, null, null, null, null, null, null, - metricFilterList5, null), + new TimelineEntityFilters.Builder().metricFilters(metricFilterList5) + .build(), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS), null)); assertEquals(3, entities.size()); @@ -1508,8 +1491,8 @@ public void testReadEntitiesMetricFilterPrefix() throws Exception { Set entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, "application_1231111111_1111", "world", null), - new TimelineEntityFilters(null, null, null, null, null, null, null, - metricFilterList, null), + new TimelineEntityFilters.Builder().metricFilters(metricFilterList) + .build(), new TimelineDataToRetrieve(null, list, null, null)); assertEquals(1, entities.size()); int metricCnt = 0; @@ -1537,8 +1520,8 @@ public void testReadEntitiesMetricFilterPrefix() throws Exception { entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, "application_1231111111_1111", "world", null), - new TimelineEntityFilters(null, null, null, null, null, null, null, - metricFilterList1, null), + new TimelineEntityFilters.Builder().metricFilters(metricFilterList1) + .build(), new TimelineDataToRetrieve( null, metricsToRetrieve, EnumSet.of(Field.METRICS), 
null)); assertEquals(2, entities.size()); @@ -1556,8 +1539,10 @@ public void testReadEntitiesMetricFilterPrefix() throws Exception { entities = reader.getEntities(new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, "application_1231111111_1111", - "world", null), new TimelineEntityFilters(null, null, null, null, null, - null, null, metricFilterList1, null), new TimelineDataToRetrieve(null, + "world", null), + new TimelineEntityFilters.Builder().metricFilters(metricFilterList1) + .build(), + new TimelineDataToRetrieve(null, metricsToRetrieve, EnumSet.of(Field.METRICS), Integer.MAX_VALUE)); assertEquals(2, entities.size()); metricCnt = 0; @@ -1591,8 +1576,8 @@ public void testReadEntitiesInfoFilters() throws Exception { Set entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, "application_1231111111_1111", "world", null), - new TimelineEntityFilters(null, null, null, null, null, infoFilterList, - null, null, null), + new TimelineEntityFilters.Builder().infoFilters(infoFilterList) + .build(), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.INFO), null)); assertEquals(2, entities.size()); int infoCnt = 0; @@ -1607,8 +1592,8 @@ public void testReadEntitiesInfoFilters() throws Exception { entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, "application_1231111111_1111", "world", null), - new TimelineEntityFilters(null, null, null, null, null, infoFilterList1, - null, null, null), + new TimelineEntityFilters.Builder().infoFilters(infoFilterList1) + .build(), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.INFO), null)); assertEquals(1, entities.size()); infoCnt = 0; @@ -1625,8 +1610,8 @@ public void testReadEntitiesInfoFilters() throws Exception { entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, "application_1231111111_1111", "world", null), - new TimelineEntityFilters(null, null, null, null, null, infoFilterList2, - null, null, null), + new TimelineEntityFilters.Builder().infoFilters(infoFilterList2) + .build(), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.INFO), null)); assertEquals(0, entities.size()); @@ -1636,8 +1621,8 @@ public void testReadEntitiesInfoFilters() throws Exception { entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, "application_1231111111_1111", "world", null), - new TimelineEntityFilters(null, null, null, null, null, infoFilterList3, - null, null, null), + new TimelineEntityFilters.Builder().infoFilters(infoFilterList3) + .build(), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.INFO), null)); assertEquals(0, entities.size()); @@ -1647,8 +1632,8 @@ public void testReadEntitiesInfoFilters() throws Exception { entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, "application_1231111111_1111", "world", null), - new TimelineEntityFilters(null, null, null, null, null, infoFilterList4, - null, null, null), + new TimelineEntityFilters.Builder().infoFilters(infoFilterList4) + .build(), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.INFO), null)); assertEquals(0, entities.size()); @@ -1658,8 +1643,8 @@ public void testReadEntitiesInfoFilters() throws Exception { entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, "application_1231111111_1111", "world", 
null), - new TimelineEntityFilters(null, null, null, null, null, infoFilterList5, - null, null, null), + new TimelineEntityFilters.Builder().infoFilters(infoFilterList5) + .build(), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.INFO), null)); assertEquals(3, entities.size()); } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowActivity.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowActivity.java index c21e874..1930198 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowActivity.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowActivity.java @@ -184,8 +184,7 @@ public void testWriteFlowRunMinMax() throws Exception { Set entities = hbr.getEntities( new TimelineReaderContext(cluster, null, null, null, null, TimelineEntityType.YARN_FLOW_ACTIVITY.toString(), null), - new TimelineEntityFilters(10L, null, null, null, null, null, - null, null, null), + new TimelineEntityFilters.Builder().entityLimit(10L).build(), new TimelineDataToRetrieve()); assertEquals(1, entities.size()); for (TimelineEntity e : entities) { @@ -249,8 +248,7 @@ public void testWriteFlowActivityOneFlow() throws Exception { Set entities = hbr.getEntities( new TimelineReaderContext(cluster, user, flow, null, null, TimelineEntityType.YARN_FLOW_ACTIVITY.toString(), null), - new TimelineEntityFilters(10L, null, null, null, null, null, - null, null, null), + new TimelineEntityFilters.Builder().entityLimit(10L).build(), new TimelineDataToRetrieve()); assertEquals(1, entities.size()); for (TimelineEntity e : entities) { @@ -377,8 +375,7 @@ public void testFlowActivityTableOneFlowMultipleRunIds() throws IOException { Set entities = hbr.getEntities( new TimelineReaderContext(cluster, null, null, null, null, TimelineEntityType.YARN_FLOW_ACTIVITY.toString(), null), - new TimelineEntityFilters(10L, null, null, null, null, null, - null, null, null), + new TimelineEntityFilters.Builder().entityLimit(10L).build(), new TimelineDataToRetrieve()); assertEquals(1, entities.size()); for (TimelineEntity e : entities) { diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowRun.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowRun.java index 2a101cf..7d53850 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowRun.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowRun.java @@ -608,7 +608,7 @@ public void testWriteFlowRunMetricsPrefix() throws Exception { Set entities = 
hbr.getEntities( new TimelineReaderContext(cluster, user, flow, null, null, TimelineEntityType.YARN_FLOW_RUN.toString(), null), - new TimelineEntityFilters(), + new TimelineEntityFilters.Builder().build(), new TimelineDataToRetrieve(null, metricsToRetrieve, null, null)); assertEquals(2, entities.size()); int metricCnt = 0; @@ -669,7 +669,7 @@ public void testWriteFlowRunsMetricFields() throws Exception { Set entities = hbr.getEntities( new TimelineReaderContext(cluster, user, flow, runid, null, TimelineEntityType.YARN_FLOW_RUN.toString(), null), - new TimelineEntityFilters(), + new TimelineEntityFilters.Builder().build(), new TimelineDataToRetrieve()); assertEquals(1, entities.size()); for (TimelineEntity timelineEntity : entities) { @@ -679,7 +679,8 @@ public void testWriteFlowRunsMetricFields() throws Exception { entities = hbr.getEntities( new TimelineReaderContext(cluster, user, flow, runid, null, TimelineEntityType.YARN_FLOW_RUN.toString(), null), - new TimelineEntityFilters(), new TimelineDataToRetrieve(null, null, + new TimelineEntityFilters.Builder().build(), + new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS), null)); assertEquals(1, entities.size()); for (TimelineEntity timelineEntity : entities) { @@ -850,8 +851,9 @@ public void testFilterFlowRunsByCreatedTime() throws Exception { Set entities = hbr.getEntities( new TimelineReaderContext(cluster, user, flow, null, null, TimelineEntityType.YARN_FLOW_RUN.toString(), null), - new TimelineEntityFilters(null, 1425016501000L, 1425016502001L, null, - null, null, null, null, null), new TimelineDataToRetrieve()); + new TimelineEntityFilters.Builder().createdTimeBegin(1425016501000L) + .createdTimeEnd(1425016502001L).build(), + new TimelineDataToRetrieve()); assertEquals(2, entities.size()); for (TimelineEntity entity : entities) { if (!entity.getId().equals("user2@flow_name2/1002345678918") && @@ -863,8 +865,9 @@ public void testFilterFlowRunsByCreatedTime() throws Exception { entities = hbr.getEntities( new TimelineReaderContext(cluster, user, flow, null, null, TimelineEntityType.YARN_FLOW_RUN.toString(), null), - new TimelineEntityFilters(null, 1425016501050L, null, null, null, - null, null, null, null), new TimelineDataToRetrieve()); + new TimelineEntityFilters.Builder().createdTimeBegin(1425016501050L) + .build(), + new TimelineDataToRetrieve()); assertEquals(1, entities.size()); for (TimelineEntity entity : entities) { if (!entity.getId().equals("user2@flow_name2/1002345678918")) { @@ -874,8 +877,9 @@ public void testFilterFlowRunsByCreatedTime() throws Exception { entities = hbr.getEntities( new TimelineReaderContext(cluster, user, flow, null, null, TimelineEntityType.YARN_FLOW_RUN.toString(), null), - new TimelineEntityFilters(null, null, 1425016501050L, null, null, - null, null, null, null), new TimelineDataToRetrieve()); + new TimelineEntityFilters.Builder().createdTimeEnd(1425016501050L) + .build(), + new TimelineDataToRetrieve()); assertEquals(1, entities.size()); for (TimelineEntity entity : entities) { if (!entity.getId().equals("user2@flow_name2/1002345678919")) { @@ -941,8 +945,9 @@ public void testMetricFilters() throws Exception { Set entities = hbr.getEntities( new TimelineReaderContext(cluster, user, flow, null, null, TimelineEntityType.YARN_FLOW_RUN.toString(), null), - new TimelineEntityFilters(null, null, null, null, null, null, null, - metricFilterList, null), new TimelineDataToRetrieve(null, null, + new TimelineEntityFilters.Builder().metricFilters(metricFilterList) + .build(), + new 
TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS), null)); assertEquals(2, entities.size()); int metricCnt = 0; @@ -958,8 +963,9 @@ public void testMetricFilters() throws Exception { entities = hbr.getEntities( new TimelineReaderContext(cluster, user, flow, null, null, TimelineEntityType.YARN_FLOW_RUN.toString(), null), - new TimelineEntityFilters(null, null, null, null, null, null, null, - metricFilterList1, null), new TimelineDataToRetrieve(null, null, + new TimelineEntityFilters.Builder().metricFilters(metricFilterList1) + .build(), + new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS), null)); assertEquals(1, entities.size()); metricCnt = 0; @@ -974,8 +980,9 @@ public void testMetricFilters() throws Exception { entities = hbr.getEntities( new TimelineReaderContext(cluster, user, flow, null, null, TimelineEntityType.YARN_FLOW_RUN.toString(), null), - new TimelineEntityFilters(null, null, null, null, null, null, null, - metricFilterList2, null), new TimelineDataToRetrieve(null, null, + new TimelineEntityFilters.Builder().metricFilters(metricFilterList2) + .build(), + new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS), null)); assertEquals(0, entities.size()); @@ -984,8 +991,9 @@ public void testMetricFilters() throws Exception { entities = hbr.getEntities( new TimelineReaderContext(cluster, user, flow, null, null, TimelineEntityType.YARN_FLOW_RUN.toString(), null), - new TimelineEntityFilters(null, null, null, null, null, null, null, - metricFilterList3, null), new TimelineDataToRetrieve(null, null, + new TimelineEntityFilters.Builder().metricFilters(metricFilterList3) + .build(), + new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS), null)); assertEquals(0, entities.size()); @@ -1005,8 +1013,8 @@ public void testMetricFilters() throws Exception { entities = hbr.getEntities( new TimelineReaderContext(cluster, user, flow, null, null, TimelineEntityType.YARN_FLOW_RUN.toString(), null), - new TimelineEntityFilters(null, null, null, null, null, null, null, - metricFilterList4, null), + new TimelineEntityFilters.Builder().metricFilters(metricFilterList4) + .build(), new TimelineDataToRetrieve(null, metricsToRetrieve, EnumSet.of(Field.ALL), null)); assertEquals(2, entities.size()); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/ApplicationEntityReader.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/ApplicationEntityReader.java index 4e8286d..9a67e2b 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/ApplicationEntityReader.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/ApplicationEntityReader.java @@ -92,8 +92,8 @@ protected FilterList constructFilterListBasedOnFilters() throws IOException { FilterList listBasedOnFilters = new FilterList(); // Create filter list based on created time range and add it to // listBasedOnFilters. 
- long createdTimeBegin = filters.getCreatedTimeBegin(); - long createdTimeEnd = filters.getCreatedTimeEnd(); + long createdTimeBegin = filters.createdTimeBegin; + long createdTimeEnd = filters.createdTimeEnd; if (createdTimeBegin != 0 || createdTimeEnd != Long.MAX_VALUE) { listBasedOnFilters.addFilter( TimelineFilterUtils.createSingleColValueFiltersByRange( @@ -101,7 +101,7 @@ protected FilterList constructFilterListBasedOnFilters() throws IOException { } // Create filter list based on metric filters and add it to // listBasedOnFilters. - TimelineFilterList metricFilters = filters.getMetricFilters(); + TimelineFilterList metricFilters = filters.metricFilters; if (metricFilters != null && !metricFilters.getFilterList().isEmpty()) { listBasedOnFilters.addFilter( TimelineFilterUtils.createHBaseFilterList( @@ -109,14 +109,14 @@ protected FilterList constructFilterListBasedOnFilters() throws IOException { } // Create filter list based on config filters and add it to // listBasedOnFilters. - TimelineFilterList configFilters = filters.getConfigFilters(); + TimelineFilterList configFilters = filters.configFilters; if (configFilters != null && !configFilters.getFilterList().isEmpty()) { listBasedOnFilters.addFilter( TimelineFilterUtils.createHBaseFilterList( ApplicationColumnPrefix.CONFIG, configFilters)); } // Create filter list based on info filters and add it to listBasedOnFilters - TimelineFilterList infoFilters = filters.getInfoFilters(); + TimelineFilterList infoFilters = filters.infoFilters; if (infoFilters != null && !infoFilters.getFilterList().isEmpty()) { listBasedOnFilters.addFilter( TimelineFilterUtils.createHBaseFilterList( @@ -159,7 +159,7 @@ private FilterList createFilterListForColsOfInfoFamily() TimelineFilterUtils.createHBaseQualifierFilter( CompareOp.EQUAL, ApplicationColumnPrefix.INFO)); } - TimelineFilterList relatesTo = getFilters().getRelatesTo(); + TimelineFilterList relatesTo = getFilters().relatesTo; if (hasField(fieldsToRetrieve, Field.RELATES_TO)) { // If RELATES_TO field has to be retrieved, add a filter for fetching // columns with RELATES_TO column prefix. @@ -176,7 +176,7 @@ private FilterList createFilterListForColsOfInfoFamily() infoFamilyColsFilter.addFilter(createFiltersFromColumnQualifiers( ApplicationColumnPrefix.RELATES_TO, relatesToCols)); } - TimelineFilterList isRelatedTo = getFilters().getIsRelatedTo(); + TimelineFilterList isRelatedTo = getFilters().isRelatedTo; if (hasField(fieldsToRetrieve, Field.IS_RELATED_TO)) { // If IS_RELATED_TO field has to be retrieved, add a filter for fetching // columns with IS_RELATED_TO column prefix. @@ -193,7 +193,7 @@ private FilterList createFilterListForColsOfInfoFamily() infoFamilyColsFilter.addFilter(createFiltersFromColumnQualifiers( ApplicationColumnPrefix.IS_RELATED_TO, isRelatedToCols)); } - TimelineFilterList eventFilters = getFilters().getEventFilters(); + TimelineFilterList eventFilters = getFilters().eventFilters; if (hasField(fieldsToRetrieve, Field.EVENTS)) { // If EVENTS field has to be retrieved, add a filter for fetching columns // with EVENT column prefix. @@ -366,7 +366,7 @@ protected ResultScanner getResults(Configuration hbaseConf, // Whether or not flowRunID is null doesn't matter, the // ApplicationRowKeyPrefix will do the right thing. // default mode, will always scans from beginning of entity type. 
- if (getFilters().getFromId() == null) { + if (getFilters().fromId == null) { applicationRowKeyPrefix = new ApplicationRowKeyPrefix( context.getClusterId(), context.getUserId(), context.getFlowName(), context.getFlowRunId()); @@ -375,7 +375,7 @@ protected ResultScanner getResults(Configuration hbaseConf, Long flowRunId = context.getFlowRunId(); if (flowRunId == null) { AppToFlowRowKey appToFlowRowKey = new AppToFlowRowKey( - getFilters().getFromId()); + getFilters().fromId); FlowContext flowContext = lookupFlowContext(appToFlowRowKey, context.getClusterId(), hbaseConf, conn); flowRunId = flowContext.getFlowRunId(); @@ -383,7 +383,7 @@ protected ResultScanner getResults(Configuration hbaseConf, ApplicationRowKey applicationRowKey = new ApplicationRowKey(context.getClusterId(), context.getUserId(), - context.getFlowName(), flowRunId, getFilters().getFromId()); + context.getFlowName(), flowRunId, getFilters().fromId); // set start row scan.setStartRow(applicationRowKey.getRowKey()); @@ -400,7 +400,7 @@ protected ResultScanner getResults(Configuration hbaseConf, } FilterList newList = new FilterList(); - newList.addFilter(new PageFilter(getFilters().getLimit())); + newList.addFilter(new PageFilter(getFilters().limit)); if (filterList != null && !filterList.getFilters().isEmpty()) { newList.addFilter(filterList); } @@ -430,13 +430,13 @@ protected TimelineEntity parseEntity(Result result) throws IOException { // locally as relevant HBase filters to filter out rows on the basis of // isRelatedTo are not set in HBase scan. boolean checkIsRelatedTo = - !isSingleEntityRead() && filters.getIsRelatedTo() != null && - filters.getIsRelatedTo().getFilterList().size() > 0; + !isSingleEntityRead() && filters.isRelatedTo != null && + filters.isRelatedTo.getFilterList().size() > 0; if (hasField(fieldsToRetrieve, Field.IS_RELATED_TO) || checkIsRelatedTo) { readRelationship(entity, result, ApplicationColumnPrefix.IS_RELATED_TO, true); if (checkIsRelatedTo && !TimelineStorageUtils.matchIsRelatedTo(entity, - filters.getIsRelatedTo())) { + filters.isRelatedTo)) { return null; } if (!hasField(fieldsToRetrieve, @@ -450,14 +450,14 @@ protected TimelineEntity parseEntity(Result result) throws IOException { // locally as relevant HBase filters to filter out rows on the basis of // relatesTo are not set in HBase scan. boolean checkRelatesTo = - !isSingleEntityRead() && filters.getRelatesTo() != null && - filters.getRelatesTo().getFilterList().size() > 0; + !isSingleEntityRead() && filters.relatesTo != null && + filters.relatesTo.getFilterList().size() > 0; if (hasField(fieldsToRetrieve, Field.RELATES_TO) || checkRelatesTo) { readRelationship(entity, result, ApplicationColumnPrefix.RELATES_TO, false); if (checkRelatesTo && !TimelineStorageUtils.matchRelatesTo(entity, - filters.getRelatesTo())) { + filters.relatesTo)) { return null; } if (!hasField(fieldsToRetrieve, Field.RELATES_TO)) { @@ -480,12 +480,12 @@ protected TimelineEntity parseEntity(Result result) throws IOException { // as relevant HBase filters to filter out rows on the basis of events // are not set in HBase scan. 
boolean checkEvents = - !isSingleEntityRead() && filters.getEventFilters() != null && - filters.getEventFilters().getFilterList().size() > 0; + !isSingleEntityRead() && filters.eventFilters != null && + filters.eventFilters.getFilterList().size() > 0; if (hasField(fieldsToRetrieve, Field.EVENTS) || checkEvents) { readEvents(entity, result, ApplicationColumnPrefix.EVENT); if (checkEvents && !TimelineStorageUtils.matchEventFilters(entity, - filters.getEventFilters())) { + filters.eventFilters)) { return null; } if (!hasField(fieldsToRetrieve, Field.EVENTS)) { diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/FlowActivityEntityReader.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/FlowActivityEntityReader.java index a1cdb29..2c67e7f 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/FlowActivityEntityReader.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/FlowActivityEntityReader.java @@ -112,17 +112,17 @@ protected ResultScanner getResults(Configuration hbaseConf, Connection conn, FilterList filterList) throws IOException { Scan scan = new Scan(); String clusterId = getContext().getClusterId(); - if (getFilters().getFromId() == null - && getFilters().getCreatedTimeBegin() == 0L - && getFilters().getCreatedTimeEnd() == Long.MAX_VALUE) { + if (getFilters().fromId == null + && getFilters().createdTimeBegin == 0L + && getFilters().createdTimeEnd == Long.MAX_VALUE) { // All records have to be chosen. scan.setRowPrefixFilter(new FlowActivityRowKeyPrefix(clusterId) .getRowKeyPrefix()); - } else if (getFilters().getFromId() != null) { + } else if (getFilters().fromId != null) { FlowActivityRowKey key = null; try { key = - FlowActivityRowKey.parseRowKeyFromString(getFilters().getFromId()); + FlowActivityRowKey.parseRowKeyFromString(getFilters().fromId); } catch (IllegalArgumentException e) { throw new BadRequestException("Invalid filter fromid is provided."); } @@ -133,20 +133,20 @@ protected ResultScanner getResults(Configuration hbaseConf, scan.setStartRow(key.getRowKey()); scan.setStopRow( new FlowActivityRowKeyPrefix(clusterId, - (getFilters().getCreatedTimeBegin() <= 0 ? 0 - : (getFilters().getCreatedTimeBegin() - 1))) + (getFilters().createdTimeBegin <= 0 ? 0 + : (getFilters().createdTimeBegin - 1))) .getRowKeyPrefix()); } else { - scan.setStartRow(new FlowActivityRowKeyPrefix(clusterId, getFilters() - .getCreatedTimeEnd()).getRowKeyPrefix()); - scan.setStopRow(new FlowActivityRowKeyPrefix(clusterId, (getFilters() - .getCreatedTimeBegin() <= 0 ? 0 - : (getFilters().getCreatedTimeBegin() - 1))).getRowKeyPrefix()); + scan.setStartRow(new FlowActivityRowKeyPrefix(clusterId, + getFilters().createdTimeEnd).getRowKeyPrefix()); + scan.setStopRow(new FlowActivityRowKeyPrefix(clusterId, + (getFilters().createdTimeBegin <= 0 ? 
0 + : (getFilters().createdTimeBegin - 1))).getRowKeyPrefix()); } // use the page filter to limit the result to the page size // the scanner may still return more than the limit; therefore we need to // read the right number as we iterate - scan.setFilter(new PageFilter(getFilters().getLimit())); + scan.setFilter(new PageFilter(getFilters().limit)); return getTable().getResultScanner(hbaseConf, conn, scan); } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/FlowRunEntityReader.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/FlowRunEntityReader.java index cedf96a..41a042c 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/FlowRunEntityReader.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/FlowRunEntityReader.java @@ -118,15 +118,15 @@ protected void augmentParams(Configuration hbaseConf, Connection conn) { protected FilterList constructFilterListBasedOnFilters() throws IOException { FilterList listBasedOnFilters = new FilterList(); // Filter based on created time range. - Long createdTimeBegin = getFilters().getCreatedTimeBegin(); - Long createdTimeEnd = getFilters().getCreatedTimeEnd(); + Long createdTimeBegin = getFilters().createdTimeBegin; + Long createdTimeEnd = getFilters().createdTimeEnd; if (createdTimeBegin != 0 || createdTimeEnd != Long.MAX_VALUE) { listBasedOnFilters.addFilter(TimelineFilterUtils .createSingleColValueFiltersByRange(FlowRunColumn.MIN_START_TIME, createdTimeBegin, createdTimeEnd)); } // Filter based on metric filters. 
- TimelineFilterList metricFilters = getFilters().getMetricFilters(); + TimelineFilterList metricFilters = getFilters().metricFilters; if (metricFilters != null && !metricFilters.getFilterList().isEmpty()) { listBasedOnFilters.addFilter(TimelineFilterUtils.createHBaseFilterList( FlowRunColumnPrefix.METRIC, metricFilters)); @@ -212,7 +212,7 @@ protected ResultScanner getResults(Configuration hbaseConf, Connection conn, Scan scan = new Scan(); TimelineReaderContext context = getContext(); RowKeyPrefix flowRunRowKeyPrefix = null; - if (getFilters().getFromId() == null) { + if (getFilters().fromId == null) { flowRunRowKeyPrefix = new FlowRunRowKeyPrefix(context.getClusterId(), context.getUserId(), context.getFlowName()); scan.setRowPrefixFilter(flowRunRowKeyPrefix.getRowKeyPrefix()); @@ -220,7 +220,7 @@ protected ResultScanner getResults(Configuration hbaseConf, Connection conn, FlowRunRowKey flowRunRowKey = new FlowRunRowKey(context.getClusterId(), context.getUserId(), - context.getFlowName(), Long.parseLong(getFilters().getFromId())); + context.getFlowName(), Long.parseLong(getFilters().fromId)); // set start row scan.setStartRow(flowRunRowKey.getRowKey()); @@ -236,7 +236,7 @@ protected ResultScanner getResults(Configuration hbaseConf, Connection conn, } FilterList newList = new FilterList(); - newList.addFilter(new PageFilter(getFilters().getLimit())); + newList.addFilter(new PageFilter(getFilters().limit)); if (filterList != null && !filterList.getFilters().isEmpty()) { newList.addFilter(filterList); } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/GenericEntityReader.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/GenericEntityReader.java index f6904c5..63e4842 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/GenericEntityReader.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/GenericEntityReader.java @@ -97,8 +97,8 @@ protected FilterList constructFilterListBasedOnFilters() throws IOException { TimelineEntityFilters filters = getFilters(); // Create filter list based on created time range and add it to // listBasedOnFilters. - long createdTimeBegin = filters.getCreatedTimeBegin(); - long createdTimeEnd = filters.getCreatedTimeEnd(); + long createdTimeBegin = filters.createdTimeBegin; + long createdTimeEnd = filters.createdTimeEnd; if (createdTimeBegin != 0 || createdTimeEnd != Long.MAX_VALUE) { listBasedOnFilters.addFilter(TimelineFilterUtils .createSingleColValueFiltersByRange(EntityColumn.CREATED_TIME, @@ -106,20 +106,20 @@ protected FilterList constructFilterListBasedOnFilters() throws IOException { } // Create filter list based on metric filters and add it to // listBasedOnFilters. - TimelineFilterList metricFilters = filters.getMetricFilters(); + TimelineFilterList metricFilters = filters.metricFilters; if (metricFilters != null && !metricFilters.getFilterList().isEmpty()) { listBasedOnFilters.addFilter(TimelineFilterUtils.createHBaseFilterList( EntityColumnPrefix.METRIC, metricFilters)); } // Create filter list based on config filters and add it to // listBasedOnFilters. 
- TimelineFilterList configFilters = filters.getConfigFilters(); + TimelineFilterList configFilters = filters.configFilters; if (configFilters != null && !configFilters.getFilterList().isEmpty()) { listBasedOnFilters.addFilter(TimelineFilterUtils.createHBaseFilterList( EntityColumnPrefix.CONFIG, configFilters)); } // Create filter list based on info filters and add it to listBasedOnFilters - TimelineFilterList infoFilters = filters.getInfoFilters(); + TimelineFilterList infoFilters = filters.infoFilters; if (infoFilters != null && !infoFilters.getFilterList().isEmpty()) { listBasedOnFilters.addFilter(TimelineFilterUtils.createHBaseFilterList( EntityColumnPrefix.INFO, infoFilters)); @@ -170,9 +170,9 @@ private boolean fetchPartialIsRelatedToCols(TimelineFilterList isRelatedTo, protected boolean fetchPartialColsFromInfoFamily() { EnumSet fieldsToRetrieve = getDataToRetrieve().getFieldsToRetrieve(); TimelineEntityFilters filters = getFilters(); - return fetchPartialEventCols(filters.getEventFilters(), fieldsToRetrieve) - || fetchPartialRelatesToCols(filters.getRelatesTo(), fieldsToRetrieve) - || fetchPartialIsRelatedToCols(filters.getIsRelatedTo(), + return fetchPartialEventCols(filters.eventFilters, fieldsToRetrieve) + || fetchPartialRelatesToCols(filters.relatesTo, fieldsToRetrieve) + || fetchPartialIsRelatedToCols(filters.isRelatedTo, fieldsToRetrieve); } @@ -203,11 +203,11 @@ protected boolean needCreateFilterListBasedOnFields() { if (!flag && !isSingleEntityRead()) { TimelineEntityFilters filters = getFilters(); flag = - (filters.getEventFilters() != null && !filters.getEventFilters() + (filters.eventFilters != null && !filters.eventFilters .getFilterList().isEmpty()) - || (filters.getIsRelatedTo() != null && !filters.getIsRelatedTo() + || (filters.isRelatedTo != null && !filters.isRelatedTo .getFilterList().isEmpty()) - || (filters.getRelatesTo() != null && !filters.getRelatesTo() + || (filters.relatesTo != null && !filters.relatesTo .getFilterList().isEmpty()); } return flag; @@ -247,7 +247,7 @@ private FilterList createFilterListForColsOfInfoFamily() throws IOException { .addFilter(TimelineFilterUtils.createHBaseQualifierFilter( CompareOp.EQUAL, EntityColumnPrefix.INFO)); } - TimelineFilterList relatesTo = getFilters().getRelatesTo(); + TimelineFilterList relatesTo = getFilters().relatesTo; if (hasField(fieldsToRetrieve, Field.RELATES_TO)) { // If RELATES_TO field has to be retrieved, add a filter for fetching // columns with RELATES_TO column prefix. @@ -264,7 +264,7 @@ private FilterList createFilterListForColsOfInfoFamily() throws IOException { infoFamilyColsFilter.addFilter(createFiltersFromColumnQualifiers( EntityColumnPrefix.RELATES_TO, relatesToCols)); } - TimelineFilterList isRelatedTo = getFilters().getIsRelatedTo(); + TimelineFilterList isRelatedTo = getFilters().isRelatedTo; if (hasField(fieldsToRetrieve, Field.IS_RELATED_TO)) { // If IS_RELATED_TO field has to be retrieved, add a filter for fetching // columns with IS_RELATED_TO column prefix. @@ -281,7 +281,7 @@ private FilterList createFilterListForColsOfInfoFamily() throws IOException { infoFamilyColsFilter.addFilter(createFiltersFromColumnQualifiers( EntityColumnPrefix.IS_RELATED_TO, isRelatedToCols)); } - TimelineFilterList eventFilters = getFilters().getEventFilters(); + TimelineFilterList eventFilters = getFilters().eventFilters; if (hasField(fieldsToRetrieve, Field.EVENTS)) { // If EVENTS field has to be retrieved, add a filter for fetching columns // with EVENT column prefix. 
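A minimal sketch of the access pattern the reader changes above now depend on: with the getters removed, reader code reads the public final fields of TimelineEntityFilters directly. The helper class and method below are hypothetical (not part of the patch); only the field name metricFilters and the getFilterList() call come from the hunks above.

import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineEntityFilters;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList;

final class FilterFieldAccessSketch {
  // Mirrors the null/empty checks performed by the readers, but written against
  // the public field instead of the former getMetricFilters() getter.
  static boolean hasMetricFilters(TimelineEntityFilters filters) {
    TimelineFilterList metricFilters = filters.metricFilters;
    return metricFilters != null && !metricFilters.getFilterList().isEmpty();
  }
}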
@@ -475,7 +475,7 @@ protected ResultScanner getResults(Configuration hbaseConf, Connection conn, TimelineReaderContext context = getContext(); RowKeyPrefix entityRowKeyPrefix = null; // default mode, will always scans from beginning of entity type. - if (getFilters() == null || getFilters().getFromIdPrefix() == null) { + if (getFilters() == null || getFilters().fromIdPrefix == null) { entityRowKeyPrefix = new EntityRowKeyPrefix(context.getClusterId(), context.getUserId(), context.getFlowName(), context.getFlowRunId(), context.getAppId(), context.getEntityType(), null, null); @@ -484,7 +484,7 @@ protected ResultScanner getResults(Configuration hbaseConf, Connection conn, entityRowKeyPrefix = new EntityRowKeyPrefix(context.getClusterId(), context.getUserId(), context.getFlowName(), context.getFlowRunId(), context.getAppId(), context.getEntityType(), - getFilters().getFromIdPrefix(), getFilters().getFromId()); + getFilters().fromIdPrefix, getFilters().fromId); // set start row scan.setStartRow(entityRowKeyPrefix.getRowKeyPrefix()); @@ -501,7 +501,7 @@ protected ResultScanner getResults(Configuration hbaseConf, Connection conn, // set page filter to limit. This filter has to set only in pagination // mode. - filterList.addFilter(new PageFilter(getFilters().getLimit())); + filterList.addFilter(new PageFilter(getFilters().limit)); } scan.setMaxVersions(getDataToRetrieve().getMetricsLimit()); if (filterList != null && !filterList.getFilters().isEmpty()) { @@ -532,13 +532,13 @@ protected TimelineEntity parseEntity(Result result) throws IOException { // locally as relevant HBase filters to filter out rows on the basis of // isRelatedTo are not set in HBase scan. boolean checkIsRelatedTo = - !isSingleEntityRead() && filters.getIsRelatedTo() != null - && filters.getIsRelatedTo().getFilterList().size() > 0; + !isSingleEntityRead() && filters.isRelatedTo != null + && filters.isRelatedTo.getFilterList().size() > 0; if (hasField(fieldsToRetrieve, Field.IS_RELATED_TO) || checkIsRelatedTo) { readRelationship(entity, result, EntityColumnPrefix.IS_RELATED_TO, true); if (checkIsRelatedTo && !TimelineStorageUtils.matchIsRelatedTo(entity, - filters.getIsRelatedTo())) { + filters.isRelatedTo)) { return null; } if (!hasField(fieldsToRetrieve, Field.IS_RELATED_TO)) { @@ -551,14 +551,14 @@ protected TimelineEntity parseEntity(Result result) throws IOException { // locally as relevant HBase filters to filter out rows on the basis of // relatesTo are not set in HBase scan. boolean checkRelatesTo = - !isSingleEntityRead() && filters.getRelatesTo() != null - && filters.getRelatesTo().getFilterList().size() > 0; + !isSingleEntityRead() && filters.relatesTo != null + && filters.relatesTo.getFilterList().size() > 0; if (hasField(fieldsToRetrieve, Field.RELATES_TO) || checkRelatesTo) { readRelationship(entity, result, EntityColumnPrefix.RELATES_TO, false); if (checkRelatesTo && !TimelineStorageUtils.matchRelatesTo(entity, - filters.getRelatesTo())) { + filters.relatesTo)) { return null; } if (!hasField(fieldsToRetrieve, Field.RELATES_TO)) { @@ -581,13 +581,13 @@ protected TimelineEntity parseEntity(Result result) throws IOException { // as relevant HBase filters to filter out rows on the basis of events // are not set in HBase scan. 
boolean checkEvents = - !isSingleEntityRead() && filters.getEventFilters() != null - && filters.getEventFilters().getFilterList().size() > 0; + !isSingleEntityRead() && filters.eventFilters != null + && filters.eventFilters.getFilterList().size() > 0; if (hasField(fieldsToRetrieve, Field.EVENTS) || checkEvents) { readEvents(entity, result, EntityColumnPrefix.EVENT); if (checkEvents && !TimelineStorageUtils.matchEventFilters(entity, - filters.getEventFilters())) { + filters.eventFilters)) { return null; } if (!hasField(fieldsToRetrieve, Field.EVENTS)) { diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/TimelineEntityReader.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/TimelineEntityReader.java index 4c88cd3..f5d9535 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/TimelineEntityReader.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/TimelineEntityReader.java @@ -185,7 +185,7 @@ protected TimelineEntityFilters getFilters() { */ protected void createFiltersIfNull() { if (filters == null) { - filters = new TimelineEntityFilters(); + filters = new TimelineEntityFilters.Builder().build(); } } @@ -245,7 +245,7 @@ public TimelineEntity readEntity(Configuration hbaseConf, Connection conn) continue; } entities.add(entity); - if (entities.size() == filters.getLimit()) { + if (entities.size() == filters.limit) { break; } } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineEntityFilters.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineEntityFilters.java index 79a83c6..043bf4f 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineEntityFilters.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineEntityFilters.java @@ -22,6 +22,7 @@ import org.apache.hadoop.classification.InterfaceStability.Unstable; import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineCompareFilter; import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineCompareOp; +import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilter; import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineKeyValueFilter; import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineExistsFilter; import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList; @@ -117,19 +118,19 @@ @Private @Unstable public class TimelineEntityFilters { - private long limit; - private long createdTimeBegin; - private long createdTimeEnd; - private TimelineFilterList relatesTo; - private TimelineFilterList isRelatedTo; - private TimelineFilterList infoFilters; - private TimelineFilterList 
configFilters; - private TimelineFilterList metricFilters; - private TimelineFilterList eventFilters; - private Long fromIdPrefix; - private String fromId; - private static final long DEFAULT_BEGIN_TIME = 0L; - private static final long DEFAULT_END_TIME = Long.MAX_VALUE; + public final long limit; + public final long createdTimeBegin; + public final long createdTimeEnd; + public final TimelineFilterList relatesTo; + public final TimelineFilterList isRelatedTo; + public final TimelineFilterList infoFilters; + public final TimelineFilterList configFilters; + public final TimelineFilterList metricFilters; + public final TimelineFilterList eventFilters; + public final Long fromIdPrefix; + public final String fromId; + public static final long DEFAULT_BEGIN_TIME = 0L; + public static final long DEFAULT_END_TIME = Long.MAX_VALUE; /** @@ -137,31 +138,21 @@ */ public static final long DEFAULT_LIMIT = 100; - public TimelineEntityFilters() { - this(null, null, null, null, null, null, null, null, null); + private TimelineEntityFilters(Builder builder) { + this(builder.limit, builder.createdTimeBegin, builder.createdTimeEnd, + builder.relatesTo, builder.isRelatedTo, builder.infoFilters, + builder.configFilters, builder.metricFilters, + builder.eventFilters, builder.fromIdPrefix, builder.fromId); } - public TimelineEntityFilters(Long entityLimit, Long timeBegin, Long timeEnd, - TimelineFilterList entityRelatesTo, TimelineFilterList entityIsRelatedTo, - TimelineFilterList entityInfoFilters, - TimelineFilterList entityConfigFilters, - TimelineFilterList entityMetricFilters, - TimelineFilterList entityEventFilters, Long fromidprefix, String fromid) { - this(entityLimit, timeBegin, timeEnd, entityRelatesTo, entityIsRelatedTo, - entityInfoFilters, entityConfigFilters, entityMetricFilters, - entityEventFilters); - this.fromIdPrefix = fromidprefix; - this.fromId = fromid; - } - - public TimelineEntityFilters( + private TimelineEntityFilters( Long entityLimit, Long timeBegin, Long timeEnd, TimelineFilterList entityRelatesTo, TimelineFilterList entityIsRelatedTo, TimelineFilterList entityInfoFilters, TimelineFilterList entityConfigFilters, TimelineFilterList entityMetricFilters, - TimelineFilterList entityEventFilters) { + TimelineFilterList entityEventFilters, Long fromIdPrefix, String fromId) { if (entityLimit == null || entityLimit < 0) { this.limit = DEFAULT_LIMIT; } else { @@ -183,105 +174,80 @@ public TimelineEntityFilters( this.configFilters = entityConfigFilters; this.metricFilters = entityMetricFilters; this.eventFilters = entityEventFilters; + this.fromIdPrefix = fromIdPrefix; + this.fromId = fromId; } - public long getLimit() { - return limit; - } - - public void setLimit(Long entityLimit) { - if (entityLimit == null || entityLimit < 0) { - this.limit = DEFAULT_LIMIT; - } else { - this.limit = entityLimit; + public static class Builder { + private Long limit; + private Long createdTimeBegin; + private Long createdTimeEnd; + private TimelineFilterList relatesTo; + private TimelineFilterList isRelatedTo; + private TimelineFilterList infoFilters; + private TimelineFilterList configFilters; + private TimelineFilterList metricFilters; + private TimelineFilterList eventFilters; + private Long fromIdPrefix; + private String fromId; + + public Builder entityLimit(Long limit) { + this.limit = limit; + return this; } - } - - public long getCreatedTimeBegin() { - return createdTimeBegin; - } - public void setCreatedTimeBegin(Long timeBegin) { - if (timeBegin == null || timeBegin < 0) { - this.createdTimeBegin = 
DEFAULT_BEGIN_TIME; - } else { + public Builder createdTimeBegin(Long timeBegin) { this.createdTimeBegin = timeBegin; + return this; } - } - public long getCreatedTimeEnd() { - return createdTimeEnd; - } - - public void setCreatedTimeEnd(Long timeEnd) { - if (timeEnd == null || timeEnd < 0) { - this.createdTimeEnd = DEFAULT_END_TIME; - } else { + public Builder createdTimeEnd(Long timeEnd) { this.createdTimeEnd = timeEnd; + return this; } - } - - public TimelineFilterList getRelatesTo() { - return relatesTo; - } - - public void setRelatesTo(TimelineFilterList relations) { - this.relatesTo = relations; - } - public TimelineFilterList getIsRelatedTo() { - return isRelatedTo; - } - - public void setIsRelatedTo(TimelineFilterList relations) { - this.isRelatedTo = relations; - } - - public TimelineFilterList getInfoFilters() { - return infoFilters; - } - - public void setInfoFilters(TimelineFilterList filters) { - this.infoFilters = filters; - } - - public TimelineFilterList getConfigFilters() { - return configFilters; - } - - public void setConfigFilters(TimelineFilterList filters) { - this.configFilters = filters; - } + public Builder relatesTo(TimelineFilterList relations) { + this.relatesTo = relations; + return this; + } - public TimelineFilterList getMetricFilters() { - return metricFilters; - } + public Builder isRelatedTo(TimelineFilterList relations) { + this.isRelatedTo = relations; + return this; + } - public void setMetricFilters(TimelineFilterList filters) { - this.metricFilters = filters; - } + public Builder infoFilters(TimelineFilterList filters) { + this.infoFilters = filters; + return this; + } - public TimelineFilterList getEventFilters() { - return eventFilters; - } + public Builder configFilter(TimelineFilterList filters) { + this.configFilters = filters; + return this; + } - public void setEventFilters(TimelineFilterList filters) { - this.eventFilters = filters; - } + public Builder metricFilters(TimelineFilterList filters) { + this.metricFilters = filters; + return this; + } - public String getFromId() { - return fromId; - } + public Builder eventFilters(TimelineFilterList filters) { + this.eventFilters = filters; + return this; + } - public void setFromId(String fromId) { - this.fromId = fromId; - } + public Builder fromId(String fromId) { + this.fromId = fromId; + return this; + } - public Long getFromIdPrefix() { - return fromIdPrefix; - } + public Builder fromIdPrefix(Long fromIdPrefix) { + this.fromIdPrefix = fromIdPrefix; + return this; + } - public void setFromIdPrefix(Long fromIdPrefix) { - this.fromIdPrefix = fromIdPrefix; + public TimelineEntityFilters build() { + return new TimelineEntityFilters(this); + } } } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderWebServices.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderWebServices.java index d8798f6..ad7e1ba 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderWebServices.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderWebServices.java @@ -1423,10 +1423,8 @@ public TimelineEntity getFlowRun( DateRange range = 
parseDateRange(dateRange); TimelineEntityFilters entityFilters = TimelineReaderWebServicesUtils.createTimelineEntityFilters( - limit, null, null, null, null, null, null, null, null, null, - fromId); - entityFilters.setCreatedTimeBegin(range.dateStart); - entityFilters.setCreatedTimeEnd(range.dateEnd); + limit, range.dateStart, range.dateEnd, + null, null, null, null, null, null, null, fromId); entities = timelineReaderManager.getEntities( TimelineReaderWebServicesUtils.createTimelineReaderContext( clusterId, null, null, null, null, diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderWebServicesUtils.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderWebServicesUtils.java index 1a518d0..8a8954a 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderWebServicesUtils.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderWebServicesUtils.java @@ -76,12 +76,52 @@ static TimelineEntityFilters createTimelineEntityFilters(String limit, String isRelatedTo, String infofilters, String conffilters, String metricfilters, String eventfilters, String fromidprefix, String fromid) throws TimelineParseException { - return new TimelineEntityFilters(parseLongStr(limit), - parseLongStr(createdTimeStart), parseLongStr(createdTimeEnd), - parseRelationFilters(relatesTo), parseRelationFilters(isRelatedTo), - parseKVFilters(infofilters, false), parseKVFilters(conffilters, true), - parseMetricFilters(metricfilters), parseEventFilters(eventfilters), - parseLongStr(fromidprefix), parseStr(fromid)); + return new TimelineEntityFilters.Builder() + .entityLimit(parseLongStr(limit)) + .createdTimeBegin(parseLongStr(createdTimeStart)) + .createdTimeEnd(parseLongStr(createdTimeEnd)) + .relatesTo(parseRelationFilters(relatesTo)) + .isRelatedTo(parseRelationFilters(isRelatedTo)) + .infoFilters(parseKVFilters(infofilters, false)) + .configFilter(parseKVFilters(conffilters, true)) + .metricFilters(parseMetricFilters(metricfilters)) + .eventFilters(parseEventFilters(eventfilters)) + .fromIdPrefix(parseLongStr(fromidprefix)) + .fromId(parseStr(fromid)).build(); + } + + /** + * Parse the passed filters represented as strings and convert them into a + * {@link TimelineEntityFilters} object. + * @param limit Limit to number of entities to return. + * @param createdTimeStart Created time start for the entities to return. + * @param createdTimeEnd Created time end for the entities to return. + * @param relatesTo Entities to return must match relatesTo. + * @param isRelatedTo Entities to return must match isRelatedTo. + * @param infofilters Entities to return must match these info filters. + * @param conffilters Entities to return must match these config filters. + * @param metricfilters Entities to return must match these metric filters. + * @param eventfilters Entities to return must match these event filters. + * @param fromidprefix Id prefix of the entity from which entities have to be retrieved. + * @param fromid Id of the entity from which entities have to be retrieved. + * @return a {@link TimelineEntityFilters} object. + * @throws TimelineParseException if any problem occurs during parsing. 
+ */ + static TimelineEntityFilters createTimelineEntityFilters(String limit, + Long createdTimeStart, Long createdTimeEnd, String relatesTo, + String isRelatedTo, String infofilters, String conffilters, + String metricfilters, String eventfilters, String fromidprefix, + String fromid) throws TimelineParseException { + return new TimelineEntityFilters.Builder() + .entityLimit(parseLongStr(limit)) + .createdTimeBegin(createdTimeStart) + .createdTimeEnd(createdTimeEnd) + .relatesTo(parseRelationFilters(relatesTo)) + .isRelatedTo(parseRelationFilters(isRelatedTo)) + .infoFilters(parseKVFilters(infofilters, false)) + .configFilter(parseKVFilters(conffilters, true)) + .metricFilters(parseMetricFilters(metricfilters)) + .eventFilters( parseEventFilters(eventfilters)) + .fromIdPrefix(parseLongStr(fromidprefix)) + .fromId(parseStr(fromid)).build(); } /** diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/FileSystemTimelineReaderImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/FileSystemTimelineReaderImpl.java index 6612192..dbbfc3e 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/FileSystemTimelineReaderImpl.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/FileSystemTimelineReaderImpl.java @@ -293,43 +293,43 @@ public int compare(Long l1, Long l2) { continue; } if (!isTimeInRange(entity.getCreatedTime(), - filters.getCreatedTimeBegin(), filters.getCreatedTimeEnd())) { + filters.createdTimeBegin, filters.createdTimeEnd)) { continue; } - if (filters.getRelatesTo() != null && - !filters.getRelatesTo().getFilterList().isEmpty() && + if (filters.relatesTo != null && + !filters.relatesTo.getFilterList().isEmpty() && !TimelineStorageUtils.matchRelatesTo(entity, - filters.getRelatesTo())) { + filters.relatesTo)) { continue; } - if (filters.getIsRelatedTo() != null && - !filters.getIsRelatedTo().getFilterList().isEmpty() && + if (filters.isRelatedTo != null && + !filters.isRelatedTo.getFilterList().isEmpty() && !TimelineStorageUtils.matchIsRelatedTo(entity, - filters.getIsRelatedTo())) { + filters.isRelatedTo)) { continue; } - if (filters.getInfoFilters() != null && - !filters.getInfoFilters().getFilterList().isEmpty() && + if (filters.infoFilters != null && + !filters.infoFilters.getFilterList().isEmpty() && !TimelineStorageUtils.matchInfoFilters(entity, - filters.getInfoFilters())) { + filters.infoFilters)) { continue; } - if (filters.getConfigFilters() != null && - !filters.getConfigFilters().getFilterList().isEmpty() && + if (filters.configFilters != null && + !filters.configFilters.getFilterList().isEmpty() && !TimelineStorageUtils.matchConfigFilters(entity, - filters.getConfigFilters())) { + filters.configFilters)) { continue; } - if (filters.getMetricFilters() != null && - !filters.getMetricFilters().getFilterList().isEmpty() && + if (filters.metricFilters != null && + !filters.metricFilters.getFilterList().isEmpty() && !TimelineStorageUtils.matchMetricFilters(entity, - filters.getMetricFilters())) { + filters.metricFilters)) { continue; } - if (filters.getEventFilters() != null && - !filters.getEventFilters().getFilterList().isEmpty() && + if 
(filters.eventFilters != null && + !filters.eventFilters.getFilterList().isEmpty() && !TimelineStorageUtils.matchEventFilters(entity, - filters.getEventFilters())) { + filters.eventFilters)) { continue; } TimelineEntity entityToBeReturned = createEntityToBeReturned( @@ -351,7 +351,7 @@ public int compare(Long l1, Long l2) { for (TimelineEntity entity : entitySet) { entities.add(entity); ++entitiesAdded; - if (entitiesAdded >= filters.getLimit()) { + if (entitiesAdded >= filters.limit) { return entities; } } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestFileSystemTimelineReaderImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestFileSystemTimelineReaderImpl.java index 35af169..d621896 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestFileSystemTimelineReaderImpl.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestFileSystemTimelineReaderImpl.java @@ -398,7 +398,7 @@ public void testGetEntityAllFields() throws Exception { public void testGetAllEntities() throws Exception { Set result = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", - "app", null), new TimelineEntityFilters(), + "app", null), new TimelineEntityFilters.Builder().build(), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null)); // All 4 entities will be returned Assert.assertEquals(4, result.size()); @@ -409,8 +409,8 @@ public void testGetEntitiesWithLimit() throws Exception { Set result = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", "app", null), - new TimelineEntityFilters(2L, null, null, null, null, null, null, - null, null), new TimelineDataToRetrieve()); + new TimelineEntityFilters.Builder().entityLimit(2L).build(), + new TimelineDataToRetrieve()); Assert.assertEquals(2, result.size()); // Needs to be rewritten once hashcode and equals for // TimelineEntity is implemented @@ -424,8 +424,8 @@ public void testGetEntitiesWithLimit() throws Exception { result = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", "app", null), - new TimelineEntityFilters(3L, null, null, null, null, null, null, - null, null), new TimelineDataToRetrieve()); + new TimelineEntityFilters.Builder().entityLimit(3L).build(), + new TimelineDataToRetrieve()); // Even though 2 entities out of 4 have same created time, one entity // is left out due to limit Assert.assertEquals(3, result.size()); @@ -437,8 +437,8 @@ public void testGetEntitiesByTimeWindows() throws Exception { Set result = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", "app", null), - new TimelineEntityFilters(null, 1425016502030L, 1425016502060L, null, - null, null, null, null, null), + new TimelineEntityFilters.Builder().createdTimeBegin(1425016502030L) + .createdTimeEnd(1425016502060L).build(), new TimelineDataToRetrieve()); Assert.assertEquals(1, result.size()); // Only one entity with ID id_4 should be returned. 
@@ -452,9 +452,9 @@ public void testGetEntitiesByTimeWindows() throws Exception { result = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", "app", null), - new TimelineEntityFilters(null, null, 1425016502010L, null, null, - null, null, null, null), - new TimelineDataToRetrieve()); + new TimelineEntityFilters.Builder().createdTimeEnd(1425016502010L) + .build(), + new TimelineDataToRetrieve()); Assert.assertEquals(3, result.size()); for (TimelineEntity entity : result) { if (entity.getId().equals("id_4")) { @@ -466,8 +466,8 @@ public void testGetEntitiesByTimeWindows() throws Exception { result = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", "app", null), - new TimelineEntityFilters(null, 1425016502010L, null, null, null, - null, null, null, null), + new TimelineEntityFilters.Builder().createdTimeBegin(1425016502010L) + .build(), new TimelineDataToRetrieve()); Assert.assertEquals(1, result.size()); for (TimelineEntity entity : result) { @@ -486,8 +486,8 @@ public void testGetFilteredEntities() throws Exception { Set result = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", "app", null), - new TimelineEntityFilters(null, null, null, null, null, infoFilterList, - null, null, null), + new TimelineEntityFilters.Builder().infoFilters(infoFilterList) + .build(), new TimelineDataToRetrieve()); Assert.assertEquals(1, result.size()); // Only one entity with ID id_3 should be returned. @@ -506,8 +506,8 @@ public void testGetFilteredEntities() throws Exception { result = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", "app", null), - new TimelineEntityFilters(null, null, null, null, null, null, - confFilterList, null, null), + new TimelineEntityFilters.Builder().configFilter(confFilterList) + .build(), new TimelineDataToRetrieve()); Assert.assertEquals(1, result.size()); for (TimelineEntity entity : result) { @@ -525,8 +525,7 @@ public void testGetFilteredEntities() throws Exception { result = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", "app", null), - new TimelineEntityFilters(null, null, null, null, null, null, null, - null, eventFilters), + new TimelineEntityFilters.Builder().eventFilters(eventFilters).build(), new TimelineDataToRetrieve()); Assert.assertEquals(1, result.size()); for (TimelineEntity entity : result) { @@ -542,8 +541,8 @@ public void testGetFilteredEntities() throws Exception { result = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", "app", null), - new TimelineEntityFilters(null, null, null, null, null, null, null, - metricFilterList, null), + new TimelineEntityFilters.Builder().metricFilters(metricFilterList) + .build(), new TimelineDataToRetrieve()); Assert.assertEquals(2, result.size()); // Two entities with IDs' id_1 and id_2 should be returned. 
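The test changes above all follow the same shape; a short, self-contained sketch of how such a filter argument could be built for the new configFilter(...) setter is given below. The TimelineFilterList no-arg constructor, its addFilter method, and the TimelineKeyValueFilter(TimelineCompareOp, key, value) constructor are assumed from their use elsewhere in the timeline reader code; the config key and value are made up for illustration.

import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineEntityFilters;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineCompareOp;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineKeyValueFilter;

final class ConfigFilterBuilderSketch {
  static TimelineEntityFilters onlyConfigFilter() {
    // Hypothetical filter: match entities whose config "cfg_param1" equals "value1".
    TimelineFilterList confFilterList = new TimelineFilterList();
    confFilterList.addFilter(
        new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "cfg_param1", "value1"));
    // Old form: new TimelineEntityFilters(null, null, null, null, null, null,
    //     confFilterList, null, null); every unused slot had to be spelled out.
    return new TimelineEntityFilters.Builder()
        .configFilter(confFilterList)
        .build();
  }
}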
@@ -569,8 +568,8 @@ public void testGetFilteredEntities() throws Exception { result = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", "app", null), - new TimelineEntityFilters(null, null, null, null, null, null, - confFilterList1, null, null), + new TimelineEntityFilters.Builder().configFilter(confFilterList1) + .build(), new TimelineDataToRetrieve()); Assert.assertEquals(2, result.size()); for (TimelineEntity entity : result) { @@ -592,8 +591,8 @@ public void testGetFilteredEntities() throws Exception { result = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", "app", null), - new TimelineEntityFilters(null, null, null, null, null, null, - confFilterList2, null, null), + new TimelineEntityFilters.Builder().configFilter(confFilterList2) + .build(), new TimelineDataToRetrieve()); Assert.assertEquals(2, result.size()); for (TimelineEntity entity : result) { @@ -610,8 +609,8 @@ public void testGetFilteredEntities() throws Exception { result = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", "app", null), - new TimelineEntityFilters(null, null, null, null, null, null, - confFilterList3, null, null), + new TimelineEntityFilters.Builder().configFilter(confFilterList3) + .build(), new TimelineDataToRetrieve()); Assert.assertEquals(1, result.size()); for(TimelineEntity entity : result) { @@ -628,8 +627,8 @@ public void testGetFilteredEntities() throws Exception { result = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", "app", null), - new TimelineEntityFilters(null, null, null, null, null, null, - confFilterList4, null, null), + new TimelineEntityFilters.Builder().configFilter(confFilterList4) + .build(), new TimelineDataToRetrieve()); Assert.assertEquals(0, result.size()); @@ -641,8 +640,8 @@ public void testGetFilteredEntities() throws Exception { result = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", "app", null), - new TimelineEntityFilters(null, null, null, null, null, null, - confFilterList5, null, null), + new TimelineEntityFilters.Builder().configFilter(confFilterList5) + .build(), new TimelineDataToRetrieve()); Assert.assertEquals(1, result.size()); for (TimelineEntity entity : result) { @@ -665,8 +664,8 @@ public void testGetFilteredEntities() throws Exception { result = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", "app", null), - new TimelineEntityFilters(null, null, null, null, null, null, null, - metricFilterList1, null), + new TimelineEntityFilters.Builder().metricFilters(metricFilterList1) + .build(), new TimelineDataToRetrieve()); Assert.assertEquals(2, result.size()); // Two entities with IDs' id_2 and id_3 should be returned. 
@@ -684,8 +683,8 @@ public void testGetFilteredEntities() throws Exception { result = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", "app", null), - new TimelineEntityFilters(null, null, null, null, null, null, null, - metricFilterList2, null), + new TimelineEntityFilters.Builder().metricFilters(metricFilterList2) + .build(), new TimelineDataToRetrieve()); Assert.assertEquals(1, result.size()); for (TimelineEntity entity : result) { @@ -702,8 +701,8 @@ public void testGetFilteredEntities() throws Exception { result = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", "app", null), - new TimelineEntityFilters(null, null, null, null, null, null, null, - metricFilterList3, null), + new TimelineEntityFilters.Builder().metricFilters(metricFilterList3) + .build(), new TimelineDataToRetrieve()); Assert.assertEquals(0, result.size()); @@ -715,8 +714,8 @@ public void testGetFilteredEntities() throws Exception { result = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", "app", null), - new TimelineEntityFilters(null, null, null, null, null, null, null, - metricFilterList4, null), + new TimelineEntityFilters.Builder().metricFilters(metricFilterList4) + .build(), new TimelineDataToRetrieve()); Assert.assertEquals(2, result.size()); for (TimelineEntity entity : result) { @@ -731,8 +730,8 @@ public void testGetFilteredEntities() throws Exception { result = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", "app", null), - new TimelineEntityFilters(null, null, null, null, null, null, null, - metricFilterList5, null), + new TimelineEntityFilters.Builder().metricFilters(metricFilterList5) + .build(), new TimelineDataToRetrieve()); Assert.assertEquals(2, result.size()); for (TimelineEntity entity : result) { @@ -749,8 +748,8 @@ public void testGetFilteredEntities() throws Exception { result = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", "app", null), - new TimelineEntityFilters(null, null, null, null, null, infoFilterList1, - null, null, null), + new TimelineEntityFilters.Builder().infoFilters(infoFilterList1) + .build(), new TimelineDataToRetrieve()); Assert.assertEquals(0, result.size()); @@ -762,8 +761,8 @@ public void testGetFilteredEntities() throws Exception { result = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", "app", null), - new TimelineEntityFilters(null, null, null, null, null, infoFilterList2, - null, null, null), + new TimelineEntityFilters.Builder().infoFilters(infoFilterList2) + .build(), new TimelineDataToRetrieve()); Assert.assertEquals(2, result.size()); for (TimelineEntity entity : result) { @@ -780,8 +779,8 @@ public void testGetFilteredEntities() throws Exception { result = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", "app", null), - new TimelineEntityFilters(null, null, null, null, null, infoFilterList3, - null, null, null), + new TimelineEntityFilters.Builder().infoFilters(infoFilterList3) + .build(), new TimelineDataToRetrieve()); Assert.assertEquals(0, result.size()); @@ -793,8 +792,8 @@ public void testGetFilteredEntities() throws Exception { result = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", "app", null), - new TimelineEntityFilters(null, null, null, null, null, infoFilterList4, - null, null, null), + new 
TimelineEntityFilters.Builder().infoFilters(infoFilterList4) + .build(), new TimelineDataToRetrieve()); Assert.assertEquals(1, result.size()); for (TimelineEntity entity : result) { @@ -815,8 +814,7 @@ public void testGetEntitiesByRelations() throws Exception { Set result = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", "app", null), - new TimelineEntityFilters(null, null, null, relatesTo, null, null, - null, null, null), + new TimelineEntityFilters.Builder().relatesTo(relatesTo).build(), new TimelineDataToRetrieve()); Assert.assertEquals(1, result.size()); // Only one entity with ID id_1 should be returned. @@ -835,8 +833,7 @@ public void testGetEntitiesByRelations() throws Exception { result = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", "app", null), - new TimelineEntityFilters(null, null, null, null, isRelatedTo, null, - null, null, null), + new TimelineEntityFilters.Builder().isRelatedTo(isRelatedTo).build(), new TimelineDataToRetrieve()); Assert.assertEquals(2, result.size()); // Two entities with IDs' id_1 and id_3 should be returned.
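Finally, a minimal usage sketch of the Builder introduced by this patch, using only the builder methods shown in the hunks above; the limit, time range, and fromId values are illustrative, not taken from any test.

import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineEntityFilters;

final class TimelineEntityFiltersBuilderSketch {
  public static void main(String[] args) {
    // Any setter that is not called is simply left out; unset filter lists stay
    // null, and an unset limit or time range falls back to the (now public)
    // DEFAULT_LIMIT, DEFAULT_BEGIN_TIME and DEFAULT_END_TIME constants.
    TimelineEntityFilters filters = new TimelineEntityFilters.Builder()
        .entityLimit(10L)
        .createdTimeBegin(1425016501000L)
        .createdTimeEnd(1425016502001L)
        .fromId("application_1231111111_1111")
        .build();

    // The built object is immutable; callers and storage readers consume its
    // public final fields directly.
    System.out.println(filters.limit + " " + filters.createdTimeBegin + " "
        + filters.createdTimeEnd + " " + filters.fromId);
  }
}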