diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
index 8752e5d..dc5cb69 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
@@ -2047,6 +2047,14 @@ public static boolean isAclEnabled(Configuration conf) {
       + "hbase.coprocessor.app-final-value-retention-milliseconds";

   /**
+   * The name of the setting for the location of the coprocessor
+   * jar on HDFS.
+   */
+  public static final String FLOW_RUN_COPROCESSOR_JAR_HDFS_LOCATION =
+      TIMELINE_SERVICE_PREFIX
+      + "hbase.coprocessor.jar.hdfs.location";
+
+  /**
    * The name for setting that points to an optional HBase configuration
    * (hbase-site.xml file) with settings that will override the ones found on
    * the classpath.
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesHBaseStorage.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesHBaseStorage.java
index 1b66fcb..e521bdc 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesHBaseStorage.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesHBaseStorage.java
@@ -51,8 +51,8 @@ import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.server.metrics.ApplicationMetricsConstants;
 import org.apache.hadoop.yarn.server.timelineservice.storage.HBaseTimelineWriterImpl;
-import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineSchemaCreator;
 import org.apache.hadoop.yarn.server.timelineservice.storage.common.HBaseTimelineStorageUtils;
+import org.apache.hadoop.yarn.server.timelineservice.test.common.DataGeneratorForTest;
 import org.apache.hadoop.yarn.webapp.YarnJacksonJaxbJsonProvider;
 import org.junit.After;
 import org.junit.AfterClass;
@@ -86,7 +86,7 @@ public static void setup() throws Exception {
     Configuration conf = util.getConfiguration();
     conf.setInt("hfile.format.version", 3);
     util.startMiniCluster();
-    TimelineSchemaCreator.createAllTables(util.getConfiguration(), false);
+    DataGeneratorForTest.createSchema(conf);
     loadData();
   }
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/DataGeneratorForTest.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/DataGeneratorForTest.java
deleted file mode 100644
index b56a752..0000000
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/DataGeneratorForTest.java
+++ /dev/null
@@ -1,424 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.server.timelineservice.storage;
-
-import java.io.IOException;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Set;
-
-import org.apache.hadoop.hbase.HBaseTestingUtility;
-import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities;
-import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
-import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntityType;
-import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent;
-import org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetric;
-import org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetric.Type;
-import org.apache.hadoop.yarn.server.metrics.ApplicationMetricsConstants;
-
-final class DataGeneratorForTest {
-  static void loadApps(HBaseTestingUtility util) throws IOException {
-    TimelineEntities te = new TimelineEntities();
-    TimelineEntity entity = new TimelineEntity();
-    String id = "application_1111111111_2222";
-    entity.setId(id);
-    entity.setType(TimelineEntityType.YARN_APPLICATION.toString());
-    Long cTime = 1425016502000L;
-    entity.setCreatedTime(cTime);
-    // add the info map in Timeline Entity
-    Map<String, Object> infoMap = new HashMap<>();
-    infoMap.put("infoMapKey1", "infoMapValue2");
-    infoMap.put("infoMapKey2", 20);
-    infoMap.put("infoMapKey3", 85.85);
-    entity.addInfo(infoMap);
-    // add the isRelatedToEntity info
-    Set<String> isRelatedToSet = new HashSet<>();
-    isRelatedToSet.add("relatedto1");
-    Map<String, Set<String>> isRelatedTo = new HashMap<>();
-    isRelatedTo.put("task", isRelatedToSet);
-    entity.setIsRelatedToEntities(isRelatedTo);
-    // add the relatesTo info
-    Set<String> relatesToSet = new HashSet<>();
-    relatesToSet.add("relatesto1");
-    relatesToSet.add("relatesto3");
-    Map<String, Set<String>> relatesTo = new HashMap<>();
-    relatesTo.put("container", relatesToSet);
-    Set<String> relatesToSet11 = new HashSet<>();
-    relatesToSet11.add("relatesto4");
-    relatesTo.put("container1", relatesToSet11);
-    entity.setRelatesToEntities(relatesTo);
-    // add some config entries
-    Map<String, String> conf = new HashMap<>();
-    conf.put("config_param1", "value1");
-    conf.put("config_param2", "value2");
-    conf.put("cfg_param1", "value3");
-    entity.addConfigs(conf);
-    // add metrics
-    Set<TimelineMetric> metrics = new HashSet<>();
-    TimelineMetric m1 = new TimelineMetric();
-    m1.setId("MAP_SLOT_MILLIS");
-    Map<Long, Number> metricValues = new HashMap<>();
-    long ts = System.currentTimeMillis();
-    metricValues.put(ts - 120000, 100000000);
-    metricValues.put(ts - 100000, 200000000);
-    metricValues.put(ts - 80000, 300000000);
-    metricValues.put(ts - 60000, 400000000);
-    metricValues.put(ts - 40000, 50000000000L);
-    metricValues.put(ts - 20000, 60000000000L);
-    m1.setType(Type.TIME_SERIES);
-    m1.setValues(metricValues);
-    metrics.add(m1);
-
-    TimelineMetric m12 = new TimelineMetric();
-    m12.setId("MAP1_BYTES");
-    m12.addValue(ts, 50);
-    metrics.add(m12);
-    entity.addMetrics(metrics);
-    TimelineEvent event = new TimelineEvent();
-    event.setId("start_event");
-    event.setTimestamp(ts);
-    entity.addEvent(event);
-    te.addEntity(entity);
-    TimelineEntities te1 = new TimelineEntities();
-    TimelineEntity entity1 = new TimelineEntity();
-    String id1 = "application_1111111111_3333";
-    entity1.setId(id1);
-    entity1.setType(TimelineEntityType.YARN_APPLICATION.toString());
-    entity1.setCreatedTime(cTime + 20L);
-    // add the info map in Timeline Entity
-    Map<String, Object> infoMap1 = new HashMap<>();
-    infoMap1.put("infoMapKey1", "infoMapValue1");
-    infoMap1.put("infoMapKey2", 10);
-    entity1.addInfo(infoMap1);
-
-    // add the isRelatedToEntity info
-    Set<String> isRelatedToSet1 = new HashSet<>();
-    isRelatedToSet1.add("relatedto3");
-    isRelatedToSet1.add("relatedto5");
-    Map<String, Set<String>> isRelatedTo1 = new HashMap<>();
-    isRelatedTo1.put("task1", isRelatedToSet1);
-    Set<String> isRelatedToSet11 = new HashSet<>();
-    isRelatedToSet11.add("relatedto4");
-    isRelatedTo1.put("task2", isRelatedToSet11);
-    entity1.setIsRelatedToEntities(isRelatedTo1);
-
-    // add the relatesTo info
-    Set<String> relatesToSet1 = new HashSet<>();
-    relatesToSet1.add("relatesto1");
-    relatesToSet1.add("relatesto2");
-    Map<String, Set<String>> relatesTo1 = new HashMap<>();
-    relatesTo1.put("container", relatesToSet1);
-    entity1.setRelatesToEntities(relatesTo1);
-
-    // add some config entries
-    Map<String, String> conf1 = new HashMap<>();
-    conf1.put("cfg_param1", "value1");
-    conf1.put("cfg_param2", "value2");
-    entity1.addConfigs(conf1);
-
-    // add metrics
-    Set<TimelineMetric> metrics1 = new HashSet<>();
-    TimelineMetric m2 = new TimelineMetric();
-    m2.setId("MAP1_SLOT_MILLIS");
-    Map<Long, Number> metricValues1 = new HashMap<>();
-    long ts1 = System.currentTimeMillis();
-    metricValues1.put(ts1 - 120000, 100000000);
-    metricValues1.put(ts1 - 100000, 200000000);
-    metricValues1.put(ts1 - 80000, 300000000);
-    metricValues1.put(ts1 - 60000, 400000000);
-    metricValues1.put(ts1 - 40000, 50000000000L);
-    metricValues1.put(ts1 - 20000, 60000000000L);
-    m2.setType(Type.TIME_SERIES);
-    m2.setValues(metricValues1);
-    metrics1.add(m2);
-    entity1.addMetrics(metrics1);
-    TimelineEvent event11 = new TimelineEvent();
-    event11.setId("end_event");
-    event11.setTimestamp(ts);
-    entity1.addEvent(event11);
-    TimelineEvent event12 = new TimelineEvent();
-    event12.setId("update_event");
-    event12.setTimestamp(ts - 10);
-    entity1.addEvent(event12);
-    te1.addEntity(entity1);
-
-    TimelineEntities te2 = new TimelineEntities();
-    TimelineEntity entity2 = new TimelineEntity();
-    String id2 = "application_1111111111_4444";
-    entity2.setId(id2);
-    entity2.setType(TimelineEntityType.YARN_APPLICATION.toString());
-    entity2.setCreatedTime(cTime + 40L);
-    TimelineEvent event21 = new TimelineEvent();
-    event21.setId("update_event");
-    event21.setTimestamp(ts - 20);
-    entity2.addEvent(event21);
-    Set<String> isRelatedToSet2 = new HashSet<String>();
-    isRelatedToSet2.add("relatedto3");
-    Map<String, Set<String>> isRelatedTo2 = new HashMap<>();
-    isRelatedTo2.put("task1", isRelatedToSet2);
-    entity2.setIsRelatedToEntities(isRelatedTo2);
-    Map<String, Set<String>> relatesTo3 = new HashMap<>();
-    Set<String> relatesToSet14 = new HashSet<String>();
-    relatesToSet14.add("relatesto7");
-    relatesTo3.put("container2", relatesToSet14);
-    entity2.setRelatesToEntities(relatesTo3);
-
-    te2.addEntity(entity2);
-    HBaseTimelineWriterImpl hbi = null;
-    try {
-      hbi = new HBaseTimelineWriterImpl(util.getConfiguration());
-      hbi.init(util.getConfiguration());
-      hbi.start();
-      String cluster = "cluster1";
-      String user = "user1";
-      String flow = "some_flow_name";
-      String flowVersion = "AB7822C10F1111";
-      long runid = 1002345678919L;
-      String appName = "application_1111111111_2222";
-      hbi.write(cluster, user, flow, flowVersion, runid, appName, te);
-      appName = "application_1111111111_3333";
-      hbi.write(cluster, user, flow, flowVersion, runid, appName, te1);
-      appName = "application_1111111111_4444";
-      hbi.write(cluster, user, flow, flowVersion, runid, appName, te2);
-      hbi.stop();
-    } finally {
-      if (hbi != null) {
-        hbi.stop();
-        hbi.close();
-      }
-    }
-  }
-
-  static void loadEntities(HBaseTestingUtility util) throws IOException {
-    TimelineEntities te = new TimelineEntities();
-    TimelineEntity entity = new TimelineEntity();
-    String id = "hello";
-    String type = "world";
-    entity.setId(id);
-    entity.setType(type);
-    Long cTime = 1425016502000L;
-    entity.setCreatedTime(cTime);
-    // add the info map in Timeline Entity
-    Map<String, Object> infoMap = new HashMap<>();
-    infoMap.put("infoMapKey1", "infoMapValue2");
-    infoMap.put("infoMapKey2", 20);
-    infoMap.put("infoMapKey3", 71.4);
-    entity.addInfo(infoMap);
-    // add the isRelatedToEntity info
-    Set<String> isRelatedToSet = new HashSet<>();
-    isRelatedToSet.add("relatedto1");
-    Map<String, Set<String>> isRelatedTo = new HashMap<>();
-    isRelatedTo.put("task", isRelatedToSet);
-    entity.setIsRelatedToEntities(isRelatedTo);
-
-    // add the relatesTo info
-    Set<String> relatesToSet = new HashSet<String>();
-    relatesToSet.add("relatesto1");
-    relatesToSet.add("relatesto3");
-    Map<String, Set<String>> relatesTo = new HashMap<>();
-    relatesTo.put("container", relatesToSet);
-    Set<String> relatesToSet11 = new HashSet<>();
-    relatesToSet11.add("relatesto4");
-    relatesTo.put("container1", relatesToSet11);
-    entity.setRelatesToEntities(relatesTo);
-
-    // add some config entries
-    Map<String, String> conf = new HashMap<>();
-    conf.put("config_param1", "value1");
-    conf.put("config_param2", "value2");
-    conf.put("cfg_param1", "value3");
-    entity.addConfigs(conf);
-
-    // add metrics
-    Set<TimelineMetric> metrics = new HashSet<>();
-    TimelineMetric m1 = new TimelineMetric();
-    m1.setId("MAP_SLOT_MILLIS");
-    Map<Long, Number> metricValues = new HashMap<>();
-    long ts = System.currentTimeMillis();
-    metricValues.put(ts - 120000, 100000000);
-    metricValues.put(ts - 100000, 200000000);
-    metricValues.put(ts - 80000, 300000000);
-    metricValues.put(ts - 60000, 400000000);
-    metricValues.put(ts - 40000, 50000000000L);
-    metricValues.put(ts - 20000, 70000000000L);
-    m1.setType(Type.TIME_SERIES);
-    m1.setValues(metricValues);
-    metrics.add(m1);
-
-    TimelineMetric m12 = new TimelineMetric();
-    m12.setId("MAP1_BYTES");
-    m12.addValue(ts, 50);
-    metrics.add(m12);
-    entity.addMetrics(metrics);
-    TimelineEvent event = new TimelineEvent();
-    event.setId("start_event");
-    event.setTimestamp(ts);
-    entity.addEvent(event);
-    te.addEntity(entity);
-
-    TimelineEntity entity1 = new TimelineEntity();
-    String id1 = "hello1";
-    entity1.setId(id1);
-    entity1.setType(type);
-    entity1.setCreatedTime(cTime + 20L);
-
-    // add the info map in Timeline Entity
-    Map<String, Object> infoMap1 = new HashMap<>();
-    infoMap1.put("infoMapKey1", "infoMapValue1");
-    infoMap1.put("infoMapKey2", 10);
-    entity1.addInfo(infoMap1);
-
-    // add event.
-    TimelineEvent event11 = new TimelineEvent();
-    event11.setId("end_event");
-    event11.setTimestamp(ts);
-    entity1.addEvent(event11);
-    TimelineEvent event12 = new TimelineEvent();
-    event12.setId("update_event");
-    event12.setTimestamp(ts - 10);
-    entity1.addEvent(event12);
-
-
-    // add the isRelatedToEntity info
-    Set<String> isRelatedToSet1 = new HashSet<>();
-    isRelatedToSet1.add("relatedto3");
-    isRelatedToSet1.add("relatedto5");
-    Map<String, Set<String>> isRelatedTo1 = new HashMap<>();
-    isRelatedTo1.put("task1", isRelatedToSet1);
-    Set<String> isRelatedToSet11 = new HashSet<>();
-    isRelatedToSet11.add("relatedto4");
-    isRelatedTo1.put("task2", isRelatedToSet11);
-    entity1.setIsRelatedToEntities(isRelatedTo1);
-
-    // add the relatesTo info
-    Set<String> relatesToSet1 = new HashSet<String>();
-    relatesToSet1.add("relatesto1");
-    relatesToSet1.add("relatesto2");
-    Map<String, Set<String>> relatesTo1 = new HashMap<>();
-    relatesTo1.put("container", relatesToSet1);
-    entity1.setRelatesToEntities(relatesTo1);
-
-    // add some config entries
-    Map<String, String> conf1 = new HashMap<>();
-    conf1.put("cfg_param1", "value1");
-    conf1.put("cfg_param2", "value2");
-    entity1.addConfigs(conf1);
-
-    // add metrics
-    Set<TimelineMetric> metrics1 = new HashSet<>();
-    TimelineMetric m2 = new TimelineMetric();
-    m2.setId("MAP1_SLOT_MILLIS");
-    Map<Long, Number> metricValues1 = new HashMap<>();
-    long ts1 = System.currentTimeMillis();
-    metricValues1.put(ts1 - 120000, 100000000);
-    metricValues1.put(ts1 - 100000, 200000000);
-    metricValues1.put(ts1 - 80000, 300000000);
-    metricValues1.put(ts1 - 60000, 400000000);
-    metricValues1.put(ts1 - 40000, 50000000000L);
-    metricValues1.put(ts1 - 20000, 60000000000L);
-    m2.setType(Type.TIME_SERIES);
-    m2.setValues(metricValues1);
-    metrics1.add(m2);
-    entity1.addMetrics(metrics1);
-    te.addEntity(entity1);
-
-    TimelineEntity entity2 = new TimelineEntity();
-    String id2 = "hello2";
-    entity2.setId(id2);
-    entity2.setType(type);
-    entity2.setCreatedTime(cTime + 40L);
-    TimelineEvent event21 = new TimelineEvent();
-    event21.setId("update_event");
-    event21.setTimestamp(ts - 20);
-    entity2.addEvent(event21);
-    Set<String> isRelatedToSet2 = new HashSet<>();
-    isRelatedToSet2.add("relatedto3");
-    Map<String, Set<String>> isRelatedTo2 = new HashMap<>();
-    isRelatedTo2.put("task1", isRelatedToSet2);
-    entity2.setIsRelatedToEntities(isRelatedTo2);
-    Map<String, Set<String>> relatesTo3 = new HashMap<>();
-    Set<String> relatesToSet14 = new HashSet<>();
-    relatesToSet14.add("relatesto7");
-    relatesTo3.put("container2", relatesToSet14);
-    entity2.setRelatesToEntities(relatesTo3);
-    te.addEntity(entity2);
-
-    // For listing types
-    for (int i = 0; i < 10; i++) {
-      TimelineEntity entity3 = new TimelineEntity();
-      String id3 = "typeTest" + i;
-      entity3.setId(id3);
-      StringBuilder typeName = new StringBuilder("newType");
-      for (int j = 0; j < (i % 3); j++) {
-        typeName.append(" ").append(j);
-      }
-      entity3.setType(typeName.toString());
-      entity3.setCreatedTime(cTime + 80L + i);
-      te.addEntity(entity3);
-    }
-
-    // Create app entity for app to flow table
-    TimelineEntities appTe1 = new TimelineEntities();
-    TimelineEntity entityApp1 = new TimelineEntity();
-    String appName1 = "application_1231111111_1111";
-    entityApp1.setId(appName1);
-    entityApp1.setType(TimelineEntityType.YARN_APPLICATION.toString());
-    entityApp1.setCreatedTime(cTime + 40L);
-    TimelineEvent appCreationEvent1 = new TimelineEvent();
-    appCreationEvent1.setId(ApplicationMetricsConstants.CREATED_EVENT_TYPE);
-    appCreationEvent1.setTimestamp(cTime);
-    entityApp1.addEvent(appCreationEvent1);
-    appTe1.addEntity(entityApp1);
-
-    TimelineEntities appTe2 = new TimelineEntities();
-    TimelineEntity entityApp2 = new TimelineEntity();
-    String appName2 = "application_1231111111_1112";
-    entityApp2.setId(appName2);
-    entityApp2.setType(TimelineEntityType.YARN_APPLICATION.toString());
-    entityApp2.setCreatedTime(cTime + 50L);
-    TimelineEvent appCreationEvent2 = new TimelineEvent();
-    appCreationEvent2.setId(ApplicationMetricsConstants.CREATED_EVENT_TYPE);
-    appCreationEvent2.setTimestamp(cTime);
-    entityApp2.addEvent(appCreationEvent2);
-    appTe2.addEntity(entityApp2);
-
-    HBaseTimelineWriterImpl hbi = null;
-    try {
-      hbi = new HBaseTimelineWriterImpl(util.getConfiguration());
-      hbi.init(util.getConfiguration());
-      hbi.start();
-      String cluster = "cluster1";
-      String user = "user1";
-      String flow = "some_flow_name";
-      String flowVersion = "AB7822C10F1111";
-      long runid = 1002345678919L;
-      hbi.write(cluster, user, flow, flowVersion, runid, appName1, te);
-      hbi.write(cluster, user, flow, flowVersion, runid, appName2, te);
-      hbi.write(cluster, user, flow, flowVersion, runid, appName1, appTe1);
-      hbi.write(cluster, user, flow, flowVersion, runid, appName2, appTe2);
-      hbi.stop();
-    } finally {
-      if (hbi != null) {
-        hbi.stop();
-        hbi.close();
-      }
-    }
-  }
-}
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorageApps.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorageApps.java
index e70198a..ac41238 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorageApps.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorageApps.java
@@ -72,6 +72,7 @@
 import org.apache.hadoop.yarn.server.timelineservice.storage.common.KeyConverter;
 import org.apache.hadoop.yarn.server.timelineservice.storage.common.Separator;
 import org.apache.hadoop.yarn.server.timelineservice.storage.common.StringKeyConverter;
+import org.apache.hadoop.yarn.server.timelineservice.test.common.DataGeneratorForTest;
 import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.Assert;
@@ -91,14 +92,10 @@ public static void setupBeforeClass() throws Exception {
     util = new HBaseTestingUtility();
     util.startMiniCluster();
-    createSchema();
+    DataGeneratorForTest.createSchema(util.getConfiguration());
     DataGeneratorForTest.loadApps(util);
   }
 
-  private static void createSchema() throws IOException {
-    TimelineSchemaCreator.createAllTables(util.getConfiguration(), false);
-  }
-
   @Before
   public void init() throws Exception {
     reader = new HBaseTimelineReaderImpl();
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorageEntities.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorageEntities.java
index 9b35ef3..3527de5 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorageEntities.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorageEntities.java
@@ -71,6 +71,7 @@
 import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityRowKey;
 import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityRowKeyPrefix;
 import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityTable;
+import org.apache.hadoop.yarn.server.timelineservice.test.common.DataGeneratorForTest;
 import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.Assert;
@@ -98,14 +99,10 @@ public static void setupBeforeClass() throws Exception {
     util = new HBaseTestingUtility();
     util.startMiniCluster();
-    createSchema();
+    DataGeneratorForTest.createSchema(util.getConfiguration());
     DataGeneratorForTest.loadEntities(util);
   }
 
-  private static void createSchema() throws IOException {
-    TimelineSchemaCreator.createAllTables(util.getConfiguration(), false);
-  }
-
   @Before
   public void init() throws Exception {
     reader = new HBaseTimelineReaderImpl();
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorageSchema.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorageSchema.java
index 53045e5..3b62e63 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorageSchema.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorageSchema.java
@@ -38,6 +38,7 @@
 import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTable;
 import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityTable;
 import org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowRunTable;
+import org.apache.hadoop.yarn.server.timelineservice.test.common.DataGeneratorForTest;
 
 /**
  * Unit tests for checking different schema prefixes.
@@ -48,17 +49,15 @@
   @BeforeClass
   public static void setupBeforeClass() throws Exception {
     util = new HBaseTestingUtility();
+    Configuration conf = util.getConfiguration();
+    conf.setInt("hfile.format.version", 3);
     util.startMiniCluster();
   }
 
-  private static void createSchema(Configuration conf) throws IOException {
-    TimelineSchemaCreator.createAllTables(conf, false);
-  }
-
   @Test
   public void createWithDefaultPrefix() throws IOException {
     Configuration hbaseConf = util.getConfiguration();
-    createSchema(hbaseConf);
+    DataGeneratorForTest.createSchema(hbaseConf);
     Connection conn = null;
     conn = ConnectionFactory.createConnection(hbaseConf);
     Admin admin = conn.getAdmin();
@@ -88,7 +87,7 @@ public void createWithSetPrefix() throws IOException {
     String prefix = "unit-test.";
     hbaseConf.set(YarnConfiguration.TIMELINE_SERVICE_HBASE_SCHEMA_PREFIX_NAME,
         prefix);
-    createSchema(hbaseConf);
+    DataGeneratorForTest.createSchema(hbaseConf);
     Connection conn = null;
     conn = ConnectionFactory.createConnection(hbaseConf);
     Admin admin = conn.getAdmin();
@@ -115,7 +114,7 @@ public void createWithSetPrefix() throws IOException {
     prefix = "yet-another-unit-test.";
     hbaseConf.set(YarnConfiguration.TIMELINE_SERVICE_HBASE_SCHEMA_PREFIX_NAME,
         prefix);
-    createSchema(hbaseConf);
+    DataGeneratorForTest.createSchema(hbaseConf);
     entityTableName = BaseTable.getTableName(hbaseConf,
         EntityTable.TABLE_NAME_CONF_NAME, EntityTable.DEFAULT_TABLE_NAME);
     assertTrue(admin.tableExists(entityTableName));
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestFlowDataGenerator.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestFlowDataGenerator.java
deleted file mode 100644
index b608987..0000000
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestFlowDataGenerator.java
+++ /dev/null
@@ -1,386 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.server.timelineservice.storage.flow;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Set;
-
-import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
-import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntityType;
-import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent;
-import org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetric;
-import org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetric.Type;
-import org.apache.hadoop.yarn.server.metrics.ApplicationMetricsConstants;
-import org.apache.hadoop.conf.Configuration;
-
-/**
- * Generates the data/entities for the FlowRun and FlowActivity Tables.
- */
-final class TestFlowDataGenerator {
-  private TestFlowDataGenerator() {
-  }
-
-  private static final String METRIC_1 = "MAP_SLOT_MILLIS";
-  private static final String METRIC_2 = "HDFS_BYTES_READ";
-  public static final long END_TS_INCR = 10000L;
-
-  static TimelineEntity getEntityMetricsApp1(long insertTs, Configuration c1) {
-    TimelineEntity entity = new TimelineEntity();
-    String id = "flowRunMetrics_test";
-    String type = TimelineEntityType.YARN_APPLICATION.toString();
-    entity.setId(id);
-    entity.setType(type);
-    long cTime = 1425016501000L;
-    entity.setCreatedTime(cTime);
-
-    // add metrics
-    Set<TimelineMetric> metrics = new HashSet<>();
-    TimelineMetric m1 = new TimelineMetric();
-    m1.setId(METRIC_1);
-    Map<Long, Number> metricValues = new HashMap<Long, Number>();
-    long ts = insertTs;
-
-    for (int k = 1; k < 100; k++) {
-      metricValues.put(ts - k * 200000L, 20L);
-    }
-    metricValues.put(ts - 80000, 40L);
-    m1.setType(Type.TIME_SERIES);
-    m1.setValues(metricValues);
-    metrics.add(m1);
-
-    TimelineMetric m2 = new TimelineMetric();
-    m2.setId(METRIC_2);
-    metricValues = new HashMap<Long, Number>();
-    ts = System.currentTimeMillis();
-    for (int k = 1; k < 100; k++) {
-      metricValues.put(ts - k * 100000L, 31L);
-    }
-
-    metricValues.put(ts - 80000, 57L);
-    m2.setType(Type.TIME_SERIES);
-    m2.setValues(metricValues);
-    metrics.add(m2);
-
-    entity.addMetrics(metrics);
-    return entity;
-  }
-
-
-  static TimelineEntity getEntityMetricsApp1Complete(long insertTs,
-      Configuration c1) {
-    TimelineEntity entity = new TimelineEntity();
-    String id = "flowRunMetrics_test";
-    String type = TimelineEntityType.YARN_APPLICATION.toString();
-    entity.setId(id);
-    entity.setType(type);
-    long cTime = 1425016501000L;
-    entity.setCreatedTime(cTime);
-
-    // add metrics
-    Set<TimelineMetric> metrics = new HashSet<>();
-    TimelineMetric m1 = new TimelineMetric();
-    m1.setId(METRIC_1);
-    Map<Long, Number> metricValues = new HashMap<Long, Number>();
-    long ts = insertTs;
-
-    metricValues.put(ts - 80000, 40L);
-    m1.setType(Type.TIME_SERIES);
-    m1.setValues(metricValues);
-    metrics.add(m1);
-
-    TimelineMetric m2 = new TimelineMetric();
-    m2.setId(METRIC_2);
-    metricValues = new HashMap<Long, Number>();
-    ts = insertTs;
-    metricValues.put(ts - 80000, 57L);
-    m2.setType(Type.TIME_SERIES);
-    m2.setValues(metricValues);
-    metrics.add(m2);
-
-    entity.addMetrics(metrics);
-
-    TimelineEvent event = new TimelineEvent();
-    event.setId(ApplicationMetricsConstants.FINISHED_EVENT_TYPE);
-    event.setTimestamp(insertTs);
-    event.addInfo("done", "insertTs=" + insertTs);
-    entity.addEvent(event);
-    return entity;
-  }
-
-
-  static TimelineEntity getEntityMetricsApp1(long insertTs) {
-    TimelineEntity entity = new TimelineEntity();
-    String id = "flowRunMetrics_test";
-    String type = TimelineEntityType.YARN_APPLICATION.toString();
-    entity.setId(id);
-    entity.setType(type);
-    long cTime = 1425016501000L;
-    entity.setCreatedTime(cTime);
-
-    // add metrics
-    Set<TimelineMetric> metrics = new HashSet<>();
-    TimelineMetric m1 = new TimelineMetric();
-    m1.setId(METRIC_1);
-    Map<Long, Number> metricValues = new HashMap<Long, Number>();
-    long ts = insertTs;
-    metricValues.put(ts - 100000, 2L);
-    metricValues.put(ts - 80000, 40L);
-    m1.setType(Type.TIME_SERIES);
-    m1.setValues(metricValues);
-    metrics.add(m1);
-
-    TimelineMetric m2 = new TimelineMetric();
-    m2.setId(METRIC_2);
-    metricValues = new HashMap<Long, Number>();
-    ts = insertTs;
-    metricValues.put(ts - 100000, 31L);
-    metricValues.put(ts - 80000, 57L);
-    m2.setType(Type.TIME_SERIES);
-    m2.setValues(metricValues);
-    metrics.add(m2);
-
-    entity.addMetrics(metrics);
-    TimelineEvent event = new TimelineEvent();
-    event.setId(ApplicationMetricsConstants.CREATED_EVENT_TYPE);
-    long endTs = 1439379885000L;
-    event.setTimestamp(endTs);
-    String expKey = "foo_event_greater";
-    String expVal = "test_app_greater";
-    event.addInfo(expKey, expVal);
-    entity.addEvent(event);
-    return entity;
-  }
-
-
-  static TimelineEntity getEntityMetricsApp2(long insertTs) {
-    TimelineEntity entity = new TimelineEntity();
-    String id = "flowRunMetrics_test";
-    String type = TimelineEntityType.YARN_APPLICATION.toString();
-    entity.setId(id);
-    entity.setType(type);
-    long cTime = 1425016501000L;
-    entity.setCreatedTime(cTime);
-    // add metrics
-    Set<TimelineMetric> metrics = new HashSet<>();
-    TimelineMetric m1 = new TimelineMetric();
-    m1.setId(METRIC_1);
-    Map<Long, Number> metricValues = new HashMap<Long, Number>();
-    long ts = insertTs;
-    metricValues.put(ts - 100000, 5L);
-    metricValues.put(ts - 80000, 101L);
-    m1.setType(Type.TIME_SERIES);
-    m1.setValues(metricValues);
-    metrics.add(m1);
-    entity.addMetrics(metrics);
-    TimelineEvent event = new TimelineEvent();
-    event.setId(ApplicationMetricsConstants.CREATED_EVENT_TYPE);
-    long endTs = 1439379885000L;
-    event.setTimestamp(endTs);
-    String expKey = "foo_event_greater";
-    String expVal = "test_app_greater";
-    event.addInfo(expKey, expVal);
-    entity.addEvent(event);
-    return entity;
-  }
-
-  static TimelineEntity getEntity1() {
-    TimelineEntity entity = new TimelineEntity();
-    String id = "flowRunHello";
-    String type = TimelineEntityType.YARN_APPLICATION.toString();
-    entity.setId(id);
-    entity.setType(type);
-    long cTime = 1425026901000L;
-    entity.setCreatedTime(cTime);
-    // add metrics
-    Set<TimelineMetric> metrics = new HashSet<>();
-    TimelineMetric m1 = new TimelineMetric();
-    m1.setId(METRIC_1);
-    Map<Long, Number> metricValues = new HashMap<Long, Number>();
-    long ts = System.currentTimeMillis();
-    metricValues.put(ts - 120000, 100000000L);
-    metricValues.put(ts - 100000, 200000000L);
-    metricValues.put(ts - 80000, 300000000L);
-    metricValues.put(ts - 60000, 400000000L);
-    metricValues.put(ts - 40000, 50000000000L);
-    metricValues.put(ts - 20000, 60000000000L);
-    m1.setType(Type.TIME_SERIES);
-    m1.setValues(metricValues);
-    metrics.add(m1);
-    entity.addMetrics(metrics);
-
-    TimelineEvent event = new TimelineEvent();
-    event.setId(ApplicationMetricsConstants.CREATED_EVENT_TYPE);
-    event.setTimestamp(cTime);
-    String expKey = "foo_event";
-    Object expVal = "test";
-    event.addInfo(expKey, expVal);
-    entity.addEvent(event);
-
-    event = new TimelineEvent();
-    event.setId(ApplicationMetricsConstants.FINISHED_EVENT_TYPE);
-    long expTs = cTime + 21600000; // start time + 6hrs
-    event.setTimestamp(expTs);
-    event.addInfo(expKey, expVal);
-    entity.addEvent(event);
-
-    return entity;
-  }
-
-  static TimelineEntity getAFullEntity(long ts, long endTs) {
-    TimelineEntity entity = new TimelineEntity();
-    String id = "flowRunFullEntity";
-    String type = TimelineEntityType.YARN_APPLICATION.toString();
-    entity.setId(id);
-    entity.setType(type);
-    entity.setCreatedTime(ts);
-    // add metrics
-    Set<TimelineMetric> metrics = new HashSet<>();
-    TimelineMetric m1 = new TimelineMetric();
-    m1.setId(METRIC_1);
-    Map<Long, Number> metricValues = new HashMap<Long, Number>();
-    metricValues.put(ts - 120000, 100000000L);
-    metricValues.put(ts - 100000, 200000000L);
-    metricValues.put(ts - 80000, 300000000L);
-    metricValues.put(ts - 60000, 400000000L);
-    metricValues.put(ts - 40000, 50000000000L);
-    metricValues.put(ts - 20000, 60000000000L);
-    m1.setType(Type.TIME_SERIES);
-    m1.setValues(metricValues);
-    metrics.add(m1);
-    TimelineMetric m2 = new TimelineMetric();
-    m2.setId(METRIC_2);
-    metricValues = new HashMap<Long, Number>();
-    metricValues.put(ts - 900000, 31L);
-    metricValues.put(ts - 30000, 57L);
-    m2.setType(Type.TIME_SERIES);
-    m2.setValues(metricValues);
-    metrics.add(m2);
-    entity.addMetrics(metrics);
-
-    TimelineEvent event = new TimelineEvent();
-    event.setId(ApplicationMetricsConstants.CREATED_EVENT_TYPE);
-    event.setTimestamp(ts);
-    String expKey = "foo_event";
-    Object expVal = "test";
-    event.addInfo(expKey, expVal);
-    entity.addEvent(event);
-
-    event = new TimelineEvent();
-    event.setId(ApplicationMetricsConstants.FINISHED_EVENT_TYPE);
-    long expTs = ts + 21600000; // start time + 6hrs
-    event.setTimestamp(expTs);
-    event.addInfo(expKey, expVal);
-    entity.addEvent(event);
-
-    return entity;
-  }
-
-  static TimelineEntity getEntityGreaterStartTime(long startTs) {
-    TimelineEntity entity = new TimelineEntity();
-    entity.setCreatedTime(startTs);
-    entity.setId("flowRunHello with greater start time");
-    String type = TimelineEntityType.YARN_APPLICATION.toString();
-    entity.setType(type);
-    TimelineEvent event = new TimelineEvent();
-    event.setId(ApplicationMetricsConstants.CREATED_EVENT_TYPE);
-    event.setTimestamp(startTs);
-    String expKey = "foo_event_greater";
-    String expVal = "test_app_greater";
-    event.addInfo(expKey, expVal);
-    entity.addEvent(event);
-    return entity;
-  }
-
-  static TimelineEntity getEntityMaxEndTime(long endTs) {
-    TimelineEntity entity = new TimelineEntity();
-    entity.setId("flowRunHello Max End time");
-    entity.setType(TimelineEntityType.YARN_APPLICATION.toString());
-    TimelineEvent event = new TimelineEvent();
-    event.setId(ApplicationMetricsConstants.FINISHED_EVENT_TYPE);
-    event.setTimestamp(endTs);
-    String expKey = "foo_even_max_ finished";
-    String expVal = "test_app_max_finished";
-    event.addInfo(expKey, expVal);
-    entity.addEvent(event);
-    return entity;
-  }
-
-  static TimelineEntity getEntityMinStartTime(long startTs) {
-    TimelineEntity entity = new TimelineEntity();
-    String id = "flowRunHelloMInStartTime";
-    String type = TimelineEntityType.YARN_APPLICATION.toString();
-    entity.setId(id);
-    entity.setType(type);
-    entity.setCreatedTime(startTs);
-    TimelineEvent event = new TimelineEvent();
-    event.setId(ApplicationMetricsConstants.CREATED_EVENT_TYPE);
-    event.setTimestamp(startTs);
-    entity.addEvent(event);
-    return entity;
-  }
-
-  static TimelineEntity getMinFlushEntity(long startTs) {
-    TimelineEntity entity = new TimelineEntity();
-    String id = "flowRunHelloFlushEntityMin";
-    String type = TimelineEntityType.YARN_APPLICATION.toString();
-    entity.setId(id);
-    entity.setType(type);
-    entity.setCreatedTime(startTs);
-    TimelineEvent event = new TimelineEvent();
-    event.setId(ApplicationMetricsConstants.CREATED_EVENT_TYPE);
-    event.setTimestamp(startTs);
-    entity.addEvent(event);
-    return entity;
-  }
-
-  static TimelineEntity getMaxFlushEntity(long startTs) {
-    TimelineEntity entity = new TimelineEntity();
-    String id = "flowRunHelloFlushEntityMax";
-    String type = TimelineEntityType.YARN_APPLICATION.toString();
-    entity.setId(id);
-    entity.setType(type);
-    entity.setCreatedTime(startTs);
-
-    TimelineEvent event = new TimelineEvent();
-    event.setId(ApplicationMetricsConstants.FINISHED_EVENT_TYPE);
-    event.setTimestamp(startTs + END_TS_INCR);
-    entity.addEvent(event);
-    return entity;
-  }
-
-  static TimelineEntity getFlowApp1(long appCreatedTime) {
-    TimelineEntity entity = new TimelineEntity();
-    String id = "flowActivity_test";
-    String type = TimelineEntityType.YARN_APPLICATION.toString();
-    entity.setId(id);
-    entity.setType(type);
-    entity.setCreatedTime(appCreatedTime);
-
-    TimelineEvent event = new TimelineEvent();
-    event.setId(ApplicationMetricsConstants.CREATED_EVENT_TYPE);
-    event.setTimestamp(appCreatedTime);
-    String expKey = "foo_event";
-    Object expVal = "test";
-    event.addInfo(expKey, expVal);
-    entity.addEvent(event);
-
-    return entity;
-  }
-}
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowActivity.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowActivity.java
index 2db01a6..b257177 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowActivity.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowActivity.java
@@ -49,10 +49,11 @@
 import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderContext;
 import org.apache.hadoop.yarn.server.timelineservice.storage.HBaseTimelineReaderImpl;
 import org.apache.hadoop.yarn.server.timelineservice.storage.HBaseTimelineWriterImpl;
-import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineSchemaCreator;
 import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTable;
 import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnHelper;
 import org.apache.hadoop.yarn.server.timelineservice.storage.common.HBaseTimelineStorageUtils;
+import org.apache.hadoop.yarn.server.timelineservice.test.common.DataGeneratorForTest;
+import org.apache.hadoop.yarn.server.timelineservice.test.common.TestFlowDataGenerator;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -70,11 +71,7 @@ public static void setupBeforeClass() throws Exception {
     Configuration conf = util.getConfiguration();
     conf.setInt("hfile.format.version", 3);
     util.startMiniCluster();
-    createSchema();
-  }
-
-  private static void createSchema() throws IOException {
-    TimelineSchemaCreator.createAllTables(util.getConfiguration(), false);
+    DataGeneratorForTest.createSchema(util.getConfiguration());
   }
 
   /**
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowRun.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowRun.java
index c066a1f..06dd39f 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowRun.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowRun.java
@@ -19,7 +19,6 @@
 package org.apache.hadoop.yarn.server.timelineservice.storage.flow;
 
 import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
@@ -41,8 +40,8 @@ import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.Table;
-import org.apache.hadoop.hbase.regionserver.Region;
 import org.apache.hadoop.hbase.regionserver.HRegionServer;
+import org.apache.hadoop.hbase.regionserver.Region;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.yarn.api.records.timelineservice.FlowRunEntity;
 import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities;
@@ -60,10 +59,11 @@ import org.apache.hadoop.yarn.server.timelineservice.storage.HBaseTimelineReaderImpl;
 import org.apache.hadoop.yarn.server.timelineservice.storage.HBaseTimelineWriterImpl;
 import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineReader.Field;
-import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineSchemaCreator;
 import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTable;
 import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnHelper;
 import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityTable;
+import org.apache.hadoop.yarn.server.timelineservice.test.common.DataGeneratorForTest;
+import org.apache.hadoop.yarn.server.timelineservice.test.common.TestFlowDataGenerator;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -84,11 +84,7 @@ public static void setupBeforeClass() throws Exception {
     Configuration conf = util.getConfiguration();
     conf.setInt("hfile.format.version", 3);
     util.startMiniCluster();
-    createSchema();
-  }
-
-  private static void createSchema() throws IOException {
-    TimelineSchemaCreator.createAllTables(util.getConfiguration(), false);
+    DataGeneratorForTest.createSchema(util.getConfiguration());
   }
 
   @Test
@@ -106,12 +102,7 @@ public void checkCoProcessorOff() throws IOException, InterruptedException {
       // check the regions.
      // check in flow run table
       util.waitUntilAllRegionsAssigned(table);
-      HRegionServer server = util.getRSForFirstRegionInTable(table);
-      List<Region> regions = server.getOnlineRegions(table);
-      for (Region region : regions) {
-        assertTrue(FlowRunTable.isFlowRunTable(region.getRegionInfo(),
-            hbaseConf));
-      }
+      checkCoprocessorExists(table, true);
     }
 
     table = BaseTable.getTableName(hbaseConf,
@@ -121,12 +112,7 @@ public void checkCoProcessorOff() throws IOException, InterruptedException {
       // check the regions.
      // check in flow activity table
       util.waitUntilAllRegionsAssigned(table);
-      HRegionServer server = util.getRSForFirstRegionInTable(table);
-      List<Region> regions = server.getOnlineRegions(table);
-      for (Region region : regions) {
-        assertFalse(FlowRunTable.isFlowRunTable(region.getRegionInfo(),
-            hbaseConf));
-      }
+      checkCoprocessorExists(table, false);
     }
 
     table = BaseTable.getTableName(hbaseConf, EntityTable.TABLE_NAME_CONF_NAME,
@@ -135,12 +121,23 @@ public void checkCoProcessorOff() throws IOException, InterruptedException {
       // check the regions.
      // check in entity run table
       util.waitUntilAllRegionsAssigned(table);
-      HRegionServer server = util.getRSForFirstRegionInTable(table);
-      List<Region> regions = server.getOnlineRegions(table);
-      for (Region region : regions) {
-        assertFalse(FlowRunTable.isFlowRunTable(region.getRegionInfo(),
-            hbaseConf));
+      checkCoprocessorExists(table, false);
+    }
+  }
+
+  private void checkCoprocessorExists(TableName table, boolean exists)
+      throws IOException, InterruptedException {
+    HRegionServer server = util.getRSForFirstRegionInTable(table);
+    List<Region> regions = server.getOnlineRegions(table);
+    for (Region region : regions) {
+      boolean found = false;
+      Set<String> coprocs = region.getCoprocessorHost().getCoprocessors();
+      for (String coprocName : coprocs) {
+        if (coprocName.contains("FlowRunCoprocessor")) {
+          found = true;
+        }
       }
+      assertEquals(exists, found);
     }
   }
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowRunCompaction.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowRunCompaction.java
index 644e31a..f34e82c 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowRunCompaction.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowRunCompaction.java
@@ -51,12 +51,13 @@
 import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities;
 import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
 import org.apache.hadoop.yarn.server.timelineservice.storage.HBaseTimelineWriterImpl;
-import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineSchemaCreator;
 import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTable;
 import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnHelper;
 import org.apache.hadoop.yarn.server.timelineservice.storage.common.HBaseTimelineStorageUtils;
 import org.apache.hadoop.yarn.server.timelineservice.storage.common.LongConverter;
 import org.apache.hadoop.yarn.server.timelineservice.storage.common.TimestampGenerator;
+import org.apache.hadoop.yarn.server.timelineservice.test.common.DataGeneratorForTest;
+import org.apache.hadoop.yarn.server.timelineservice.test.common.TestFlowDataGenerator;
 import org.junit.AfterClass;
 import org.junit.Assert;
 import org.junit.BeforeClass;
@@ -82,11 +83,7 @@ public static void setupBeforeClass() throws Exception {
     Configuration conf = util.getConfiguration();
     conf.setInt("hfile.format.version", 3);
     util.startMiniCluster();
-    createSchema();
-  }
-
-  private static void createSchema() throws IOException {
-    TimelineSchemaCreator.createAllTables(util.getConfiguration(), false);
+    DataGeneratorForTest.createSchema(util.getConfiguration());
   }
 
   /**
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunCoprocessor.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunCoprocessor.java
index 122d399..24101c6 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunCoprocessor.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunCoprocessor.java
@@ -57,7 +57,6 @@ public class FlowRunCoprocessor extends BaseRegionObserver {
 
   private static final Log LOG = LogFactory.getLog(FlowRunCoprocessor.class);
 
-  private boolean isFlowRunRegion = false;
   private Region region;
 
   /**
@@ -71,15 +70,9 @@ public void start(CoprocessorEnvironment e) throws IOException {
     if (e instanceof RegionCoprocessorEnvironment) {
       RegionCoprocessorEnvironment env = (RegionCoprocessorEnvironment) e;
       this.region = env.getRegion();
-      isFlowRunRegion = FlowRunTable.isFlowRunTable(
-          region.getRegionInfo(), env.getConfiguration());
     }
   }
 
-  public boolean isFlowRunRegion() {
-    return isFlowRunRegion;
-  }
-
   /*
    * (non-Javadoc)
    *
@@ -99,10 +92,6 @@
   public void prePut(ObserverContext<RegionCoprocessorEnvironment> e, Put put,
      WALEdit edit, Durability durability) throws IOException {
     Map<String, byte[]> attributes = put.getAttributesMap();
-
-    if (!isFlowRunRegion) {
-      return;
-    }
     // Assumption is that all the cells in a put are the same operation.
     List<Tag> tags = new ArrayList<>();
     if ((attributes != null) && (attributes.size() > 0)) {
@@ -170,10 +159,6 @@ private long getCellTimestamp(long timestamp, List<Tag> tags) {
   @Override
   public void preGetOp(ObserverContext<RegionCoprocessorEnvironment> e,
      Get get, List<Cell> results) throws IOException {
-    if (!isFlowRunRegion) {
-      return;
-    }
-
     Scan scan = new Scan(get);
     scan.setMaxVersions();
     RegionScanner scanner = null;
@@ -205,12 +190,9 @@ public void preGetOp(ObserverContext<RegionCoprocessorEnvironment> e,
   public RegionScanner preScannerOpen(
      ObserverContext<RegionCoprocessorEnvironment> e, Scan scan,
      RegionScanner scanner) throws IOException {
-
-    if (isFlowRunRegion) {
-      // set max versions for scan to see all
-      // versions to aggregate for metrics
-      scan.setMaxVersions();
-    }
+    // set max versions for scan to see all
+    // versions to aggregate for metrics
+    scan.setMaxVersions();
     return scanner;
   }
 
@@ -230,9 +212,6 @@ public RegionScanner preScannerOpen(
   public RegionScanner postScannerOpen(
      ObserverContext<RegionCoprocessorEnvironment> e, Scan scan,
      RegionScanner scanner) throws IOException {
-    if (!isFlowRunRegion) {
-      return scanner;
-    }
     return new FlowScanner(e.getEnvironment(), scan, scanner,
         FlowScannerOperation.READ);
   }
 
@@ -241,9 +220,6 @@ public RegionScanner postScannerOpen(
   public InternalScanner preFlush(
      ObserverContext<RegionCoprocessorEnvironment> c, Store store,
      InternalScanner scanner) throws IOException {
-    if (!isFlowRunRegion) {
-      return scanner;
-    }
     if (LOG.isDebugEnabled()) {
       if (store != null) {
         LOG.debug("preFlush store = " + store.getColumnFamilyName()
@@ -264,9 +240,6 @@ public InternalScanner preFlush(
   @Override
   public void postFlush(ObserverContext<RegionCoprocessorEnvironment> c,
      Store store, StoreFile resultFile) {
-    if (!isFlowRunRegion) {
-      return;
-    }
     if (LOG.isDebugEnabled()) {
       if (store != null) {
         LOG.debug("postFlush store = " + store.getColumnFamilyName()
@@ -288,9 +261,6 @@ public InternalScanner preCompact(
      InternalScanner scanner, ScanType scanType,
      CompactionRequest request) throws IOException {
-    if (!isFlowRunRegion) {
-      return scanner;
-    }
     FlowScannerOperation requestOp = FlowScannerOperation.MINOR_COMPACTION;
     if (request != null) {
       requestOp = (request.isMajor() ?
          FlowScannerOperation.MAJOR_COMPACTION
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunTable.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunTable.java
index 4cd581b..4d5dd47 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunTable.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunTable.java
@@ -23,13 +23,14 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HColumnDescriptor;
-import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.regionserver.BloomType;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTable;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.Coprocessor;
 
 /**
  * The flow run table has column family info
@@ -95,6 +96,10 @@
   /** default value for flowrun table name. */
   public static final String DEFAULT_TABLE_NAME = "timelineservice.flowrun";
 
+  /** default location for flowrun coprocessor. */
+  public static final String DEFAULT_HDFS_LOCATION_FLOW_RUN_COPROCESSOR_JAR =
+      "/hbase/coprocessor/hadoop-yarn-server-timelineservice.jar";
+
   private static final Log LOG = LogFactory.getLog(FlowRunTable.class);
 
   /** default max number of versions. */
@@ -133,29 +138,16 @@ public void createTable(Admin admin, Configuration hbaseConf)
     infoCF.setMaxVersions(DEFAULT_METRICS_MAX_VERSIONS);
     // TODO: figure the split policy
-    flowRunTableDescp.addCoprocessor(FlowRunCoprocessor.class
-        .getCanonicalName());
+    String coprocessorJarPathStr = hbaseConf.get(
+        YarnConfiguration.FLOW_RUN_COPROCESSOR_JAR_HDFS_LOCATION,
+        DEFAULT_HDFS_LOCATION_FLOW_RUN_COPROCESSOR_JAR);
+
+    Path coprocessorJarPath = new Path(coprocessorJarPathStr);
+    flowRunTableDescp.addCoprocessor(
+        FlowRunCoprocessor.class.getCanonicalName(), coprocessorJarPath,
+        Coprocessor.PRIORITY_USER, null);
     admin.createTable(flowRunTableDescp);
     LOG.info("Status of table creation for " + table.getNameAsString() + "="
         + admin.tableExists(table));
   }
-
-  public static boolean isFlowRunTable(HRegionInfo hRegionInfo,
-      Configuration conf) {
-    String regionTableName = hRegionInfo.getTable().getNameAsString();
-    if (LOG.isDebugEnabled()) {
-      LOG.debug("regionTableName=" + regionTableName);
-    }
-    String flowRunTableName = BaseTable.getTableName(conf,
-        FlowRunTable.TABLE_NAME_CONF_NAME, FlowRunTable.DEFAULT_TABLE_NAME)
-        .getNameAsString();
-    if (flowRunTableName.equalsIgnoreCase(regionTableName)) {
-      if (LOG.isDebugEnabled()) {
-        LOG.debug(" table is the flow run table!! "
-            + flowRunTableName);
-      }
-      return true;
-    }
-    return false;
-  }
 }
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/markdown/TimelineServiceV2.md hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/markdown/TimelineServiceV2.md
index 0d77f2c..9b4e59c 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/markdown/TimelineServiceV2.md
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/markdown/TimelineServiceV2.md
@@ -216,30 +216,32 @@ http://hbase.apache.org/book.html#standalone.over.hdfs .
 Once you have an Apache HBase cluster ready to use, perform the following steps.
 
 ##### Step 2) Enable the coprocessor
+In this version, the coprocessor is loaded dynamically, as a table coprocessor on the `flowrun` table.
 
-Step 2.1) Add the timeline service jar to the HBase classpath in all HBase machines in the cluster. It
-is needed for the coprocessor as well as the schema creator. For example,
+Copy the timeline service jar to HDFS, from where HBase can load it. The schema
+creator needs it when creating the `flowrun` table. The default HDFS location is `/hbase/coprocessor`.
+For example,
 
-    cp hadoop-yarn-server-timelineservice-3.0.0-alpha1-SNAPSHOT.jar /usr/hbase/lib/
+    hadoop fs -mkdir /hbase/coprocessor
+    hadoop fs -put hadoop-yarn-server-timelineservice-3.0.0-alpha1-SNAPSHOT.jar
+           /hbase/coprocessor/hadoop-yarn-server-timelineservice.jar
 
-Step 2.2) Enable the coprocessor that handles the aggregation. To enable it, add the following entry in
-region servers' `hbase-site.xml` file (generally located in the `conf` directory) as follows:
+
+To place the jar at a different HDFS location, set the YARN configuration property
+`yarn.timeline-service.hbase.coprocessor.jar.hdfs.location`.
+For example,
 
 ```
 <property>
-  <name>hbase.coprocessor.region.classes</name>
-  <value>org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowRunCoprocessor</value>
+  <name>yarn.timeline-service.hbase.coprocessor.jar.hdfs.location</name>
+  <value>/custom/hdfs/path/jarName</value>
 </property>
 ```
 
-Step 2.3) Restart the region servers and the master to pick up the timeline service jar as well
-as the config change. In this version, the coprocessor is loaded statically
-(i.e. system coprocessor) as opposed to a dynamically (table coprocessor).
-
 ##### Step 3) Create the timeline service schema
 Finally, run the schema creator tool to create the necessary tables:
 
-    bin/hbase org.apache.hadoop.yarn.server.timelineservice.storage.TimelineSchemaCreator
+    bin/hadoop org.apache.hadoop.yarn.server.timelineservice.storage.TimelineSchemaCreator
 
 The `TimelineSchemaCreator` tool supports a few options that may come in handy especially when you
 are testing. For example, you can use `-skipExistingTable` (`-s` for short) to skip existing tables
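
Reviewer note (not part of the patch): because the coprocessor is now a table coprocessor, a `flowrun` table created *before* this change will not pick it up automatically. The sketch below is one way to attach it to an existing table using the same HBase 1.x client call the patched `FlowRunTable#createTable` makes (`HTableDescriptor#addCoprocessor(class, jarPath, priority, kvs)`). The table name and jar path shown are the defaults assumed by this patch; substitute your own values, e.g. for a prefixed schema (`YarnConfiguration.TIMELINE_SERVICE_HBASE_SCHEMA_PREFIX_NAME`) or a custom `yarn.timeline-service.hbase.coprocessor.jar.hdfs.location`.

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Coprocessor;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

public final class AttachFlowRunCoprocessor {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    // Defaults from this patch; adjust for your cluster.
    TableName table = TableName.valueOf("timelineservice.flowrun");
    Path jar = new Path("/hbase/coprocessor/hadoop-yarn-server-timelineservice.jar");
    String coproc =
        "org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowRunCoprocessor";
    try (Connection conn = ConnectionFactory.createConnection(conf);
        Admin admin = conn.getAdmin()) {
      HTableDescriptor desc = admin.getTableDescriptor(table);
      if (!desc.hasCoprocessor(coproc)) {
        // Same call FlowRunTable#createTable makes: load the class from the
        // jar on HDFS at user priority, with no extra key-value arguments.
        desc.addCoprocessor(coproc, jar, Coprocessor.PRIORITY_USER, null);
        admin.modifyTable(table, desc);
      }
    }
  }
}
```

After this, `describe 'timelineservice.flowrun'` in the HBase shell should show the coprocessor in the table attributes, which is also what the new `checkCoprocessorExists` test helper verifies programmatically.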