diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/pom.xml hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/pom.xml index affbe03..0a31dbd 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/pom.xml +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/pom.xml @@ -95,7 +95,6 @@ server/yarn_server_resourcemanager_service_protos.proto server/resourcemanager_administration_protocol.proto application_history_client.proto - server/application_history_server.proto ${project.build.directory}/generated-sources/java diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java index 1a2aa1d..407318a 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java @@ -329,12 +329,6 @@ RM_PREFIX + "nodemanagers.heartbeat-interval-ms"; public static final long DEFAULT_RM_NM_HEARTBEAT_INTERVAL_MS = 1000; - /** Number of worker threads that write the history data. */ - public static final String RM_HISTORY_WRITER_MULTI_THREADED_DISPATCHER_POOL_SIZE = - RM_PREFIX + "history-writer.multi-threaded-dispatcher.pool-size"; - public static final int DEFAULT_RM_HISTORY_WRITER_MULTI_THREADED_DISPATCHER_POOL_SIZE = - 10; - /** * The setting that controls whether yarn system metrics is published on the * timeline server or not by RM. @@ -1186,24 +1180,6 @@ @Private public static final boolean DEFAULT_APPLICATION_HISTORY_ENABLED = false; - /** Application history store class */ - @Private - public static final String APPLICATION_HISTORY_STORE = - APPLICATION_HISTORY_PREFIX + "store-class"; - - /** URI for FileSystemApplicationHistoryStore */ - @Private - public static final String FS_APPLICATION_HISTORY_STORE_URI = - APPLICATION_HISTORY_PREFIX + "fs-history-store.uri"; - - /** T-file compression types used to compress history data.*/ - @Private - public static final String FS_APPLICATION_HISTORY_STORE_COMPRESSION_TYPE = - APPLICATION_HISTORY_PREFIX + "fs-history-store.compression-type"; - @Private - public static final String DEFAULT_FS_APPLICATION_HISTORY_STORE_COMPRESSION_TYPE = - "none"; - /** The setting that controls whether timeline service is enabled or not. */ public static final String TIMELINE_SERVICE_ENABLED = TIMELINE_SERVICE_PREFIX + "enabled"; diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/server/application_history_server.proto hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/server/application_history_server.proto deleted file mode 100644 index 0fcf2ac..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/server/application_history_server.proto +++ /dev/null @@ -1,111 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -option java_package = "org.apache.hadoop.yarn.proto"; -option java_outer_classname = "ApplicationHistoryServerProtos"; -option java_generic_services = true; -option java_generate_equals_and_hash = true; -package hadoop.yarn; - -import "yarn_protos.proto"; - -message ApplicationHistoryDataProto { - optional ApplicationIdProto application_id = 1; - optional string application_name = 2; - optional string application_type = 3; - optional string user = 4; - optional string queue = 5; - optional int64 submit_time = 6; - optional int64 start_time = 7; - optional int64 finish_time = 8; - optional string diagnostics_info = 9; - optional FinalApplicationStatusProto final_application_status = 10; - optional YarnApplicationStateProto yarn_application_state = 11; -} - -message ApplicationStartDataProto { - optional ApplicationIdProto application_id = 1; - optional string application_name = 2; - optional string application_type = 3; - optional string user = 4; - optional string queue = 5; - optional int64 submit_time = 6; - optional int64 start_time = 7; -} - -message ApplicationFinishDataProto { - optional ApplicationIdProto application_id = 1; - optional int64 finish_time = 2; - optional string diagnostics_info = 3; - optional FinalApplicationStatusProto final_application_status = 4; - optional YarnApplicationStateProto yarn_application_state = 5; -} - -message ApplicationAttemptHistoryDataProto { - optional ApplicationAttemptIdProto application_attempt_id = 1; - optional string host = 2; - optional int32 rpc_port = 3; - optional string tracking_url = 4; - optional string diagnostics_info = 5; - optional FinalApplicationStatusProto final_application_status = 6; - optional ContainerIdProto master_container_id = 7; - optional YarnApplicationAttemptStateProto yarn_application_attempt_state = 8; -} - -message ApplicationAttemptStartDataProto { - optional ApplicationAttemptIdProto application_attempt_id = 1; - optional string host = 2; - optional int32 rpc_port = 3; - optional ContainerIdProto master_container_id = 4; -} - -message ApplicationAttemptFinishDataProto { - optional ApplicationAttemptIdProto application_attempt_id = 1; - optional string tracking_url = 2; - optional string diagnostics_info = 3; - optional FinalApplicationStatusProto final_application_status = 4; - optional YarnApplicationAttemptStateProto yarn_application_attempt_state = 5; -} - -message ContainerHistoryDataProto { - optional ContainerIdProto container_id = 1; - optional ResourceProto allocated_resource = 2; - optional NodeIdProto assigned_node_id = 3; - optional PriorityProto priority = 4; - optional int64 start_time = 5; - optional int64 finish_time = 6; - optional string diagnostics_info = 7; - optional int32 container_exit_status = 8; - optional ContainerStateProto container_state = 9; -} - -message ContainerStartDataProto { - optional ContainerIdProto container_id = 1; - optional ResourceProto allocated_resource = 2; - optional NodeIdProto assigned_node_id = 3; - optional PriorityProto priority = 4; - optional int64 start_time = 5; -} - -message ContainerFinishDataProto { - optional ContainerIdProto 
container_id = 1; - optional int64 finish_time = 2; - optional string diagnostics_info = 3; - optional int32 container_exit_status = 4; - optional ContainerStateProto container_state = 5; -} \ No newline at end of file diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryManagerImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryManagerImpl.java index 803dc01..001aca3 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryManagerImpl.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryManagerImpl.java @@ -19,225 +19,546 @@ package org.apache.hadoop.yarn.server.applicationhistoryservice; import java.io.IOException; +import java.util.EnumSet; import java.util.HashMap; +import java.util.List; import java.util.Map; -import java.util.Map.Entry; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.service.AbstractService; -import org.apache.hadoop.util.ReflectionUtils; +import org.apache.hadoop.yarn.api.records.ApplicationAccessType; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ApplicationAttemptReport; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ApplicationReport; +import org.apache.hadoop.yarn.api.records.ContainerExitStatus; import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.ContainerReport; -import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptHistoryData; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationHistoryData; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerHistoryData; +import org.apache.hadoop.yarn.api.records.ContainerState; +import org.apache.hadoop.yarn.api.records.FinalApplicationStatus; +import org.apache.hadoop.yarn.api.records.NodeId; +import org.apache.hadoop.yarn.api.records.Priority; +import org.apache.hadoop.yarn.api.records.Resource; +import org.apache.hadoop.yarn.api.records.YarnApplicationAttemptState; +import org.apache.hadoop.yarn.api.records.YarnApplicationState; +import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities; +import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity; +import org.apache.hadoop.yarn.api.records.timeline.TimelineEvent; +import org.apache.hadoop.yarn.exceptions.ApplicationAttemptNotFoundException; +import org.apache.hadoop.yarn.exceptions.ApplicationNotFoundException; +import org.apache.hadoop.yarn.exceptions.ContainerNotFoundException; +import org.apache.hadoop.yarn.exceptions.YarnException; +import org.apache.hadoop.yarn.server.metrics.AppAttemptMetricsConstants; +import 
org.apache.hadoop.yarn.server.metrics.ApplicationMetricsConstants; +import org.apache.hadoop.yarn.server.metrics.ContainerMetricsConstants; +import org.apache.hadoop.yarn.server.security.ApplicationACLsManager; +import org.apache.hadoop.yarn.server.timeline.NameValuePair; +import org.apache.hadoop.yarn.server.timeline.TimelineDataManager; +import org.apache.hadoop.yarn.server.timeline.TimelineReader.Field; +import org.apache.hadoop.yarn.util.ConverterUtils; import org.apache.hadoop.yarn.webapp.util.WebAppUtils; -import com.google.common.annotations.VisibleForTesting; - -public class ApplicationHistoryManagerImpl extends AbstractService implements +public class ApplicationHistoryManagerImpl extends AbstractService + implements ApplicationHistoryManager { - private static final Log LOG = LogFactory - .getLog(ApplicationHistoryManagerImpl.class); - private static final String UNAVAILABLE = "N/A"; - private ApplicationHistoryStore historyStore; + private TimelineDataManager timelineDataManager; + private ApplicationACLsManager aclsManager; private String serverHttpAddress; - public ApplicationHistoryManagerImpl() { + public ApplicationHistoryManagerImpl( + TimelineDataManager timelineDataManager, + ApplicationACLsManager aclsManager) { super(ApplicationHistoryManagerImpl.class.getName()); + this.timelineDataManager = timelineDataManager; + this.aclsManager = aclsManager; } @Override protected void serviceInit(Configuration conf) throws Exception { - LOG.info("ApplicationHistory Init"); - historyStore = createApplicationHistoryStore(conf); - historyStore.init(conf); serverHttpAddress = WebAppUtils.getHttpSchemePrefix(conf) + WebAppUtils.getAHSWebAppURLWithoutScheme(conf); super.serviceInit(conf); } @Override - protected void serviceStart() throws Exception { - LOG.info("Starting ApplicationHistory"); - historyStore.start(); - super.serviceStart(); + public ApplicationReport getApplication(ApplicationId appId) + throws YarnException, IOException { + return getApplication(appId, ApplicationReportField.ALL).appReport; } @Override - protected void serviceStop() throws Exception { - LOG.info("Stopping ApplicationHistory"); - historyStore.stop(); - super.serviceStop(); + public Map getAllApplications() + throws YarnException, IOException { + TimelineEntities entities = timelineDataManager.getEntities( + ApplicationMetricsConstants.ENTITY_TYPE, null, null, null, null, + null, null, Long.MAX_VALUE, EnumSet.allOf(Field.class), + UserGroupInformation.getLoginUser()); + Map apps = + new HashMap(); + if (entities != null && entities.getEntities() != null) { + for (TimelineEntity entity : entities.getEntities()) { + ApplicationReportExt app = + generateApplicationReport(entity, ApplicationReportField.ALL); + apps.put(app.appReport.getApplicationId(), app.appReport); + } + } + return apps; } - protected ApplicationHistoryStore createApplicationHistoryStore( - Configuration conf) { - return ReflectionUtils.newInstance(conf.getClass( - YarnConfiguration.APPLICATION_HISTORY_STORE, - FileSystemApplicationHistoryStore.class, - ApplicationHistoryStore.class), conf); + @Override + public Map + getApplicationAttempts(ApplicationId appId) + throws YarnException, IOException { + ApplicationReportExt app = getApplication( + appId, ApplicationReportField.USER_AND_ACLS); + checkAccess(app); + TimelineEntities entities = timelineDataManager.getEntities( + AppAttemptMetricsConstants.ENTITY_TYPE, + new NameValuePair( + AppAttemptMetricsConstants.PARENT_PRIMARY_FILTER, appId + .toString()), null, null, null, null, null, + 
Long.MAX_VALUE, EnumSet.allOf(Field.class), + UserGroupInformation.getLoginUser()); + Map appAttempts = + new HashMap(); + for (TimelineEntity entity : entities.getEntities()) { + ApplicationAttemptReport appAttempt = + convertToApplicationAttemptReport(entity); + appAttempts.put(appAttempt.getApplicationAttemptId(), appAttempt); + } + return appAttempts; } @Override - public ContainerReport getAMContainer(ApplicationAttemptId appAttemptId) - throws IOException { - ApplicationReport app = - getApplication(appAttemptId.getApplicationId()); - return convertToContainerReport(historyStore.getAMContainer(appAttemptId), - app == null ? null : app.getUser()); + public ApplicationAttemptReport getApplicationAttempt( + ApplicationAttemptId appAttemptId) throws YarnException, IOException { + ApplicationReportExt app = getApplication( + appAttemptId.getApplicationId(), ApplicationReportField.USER_AND_ACLS); + checkAccess(app); + TimelineEntity entity = timelineDataManager.getEntity( + AppAttemptMetricsConstants.ENTITY_TYPE, + appAttemptId.toString(), EnumSet.allOf(Field.class), + UserGroupInformation.getLoginUser()); + if (entity == null) { + throw new ApplicationAttemptNotFoundException( + "The entity for application attempt " + appAttemptId + + " doesn't exist in the timeline store"); + } else { + return convertToApplicationAttemptReport(entity); + } } @Override - public Map getAllApplications() - throws IOException { - Map histData = - historyStore.getAllApplications(); - HashMap applicationsReport = - new HashMap(); - for (Entry entry : histData - .entrySet()) { - applicationsReport.put(entry.getKey(), - convertToApplicationReport(entry.getValue())); + public ContainerReport getContainer(ContainerId containerId) + throws YarnException, IOException { + ApplicationReportExt app = getApplication( + containerId.getApplicationAttemptId().getApplicationId(), + ApplicationReportField.USER_AND_ACLS); + checkAccess(app); + TimelineEntity entity = timelineDataManager.getEntity( + ContainerMetricsConstants.ENTITY_TYPE, + containerId.toString(), EnumSet.allOf(Field.class), + UserGroupInformation.getLoginUser()); + if (entity == null) { + throw new ContainerNotFoundException( + "The entity for container " + containerId + + " doesn't exist in the timeline store"); + } else { + return convertToContainerReport( + entity, serverHttpAddress, app.appReport.getUser()); } - return applicationsReport; } @Override - public ApplicationReport getApplication(ApplicationId appId) - throws IOException { - return convertToApplicationReport(historyStore.getApplication(appId)); + public ContainerReport getAMContainer(ApplicationAttemptId appAttemptId) + throws YarnException, IOException { + ApplicationAttemptReport appAttempt = getApplicationAttempt(appAttemptId); + return getContainer(appAttempt.getAMContainerId()); } - private ApplicationReport convertToApplicationReport( - ApplicationHistoryData appHistory) throws IOException { - ApplicationAttemptId currentApplicationAttemptId = null; - String trackingUrl = UNAVAILABLE; - String host = UNAVAILABLE; - int rpcPort = -1; - - ApplicationAttemptHistoryData lastAttempt = - getLastAttempt(appHistory.getApplicationId()); - if (lastAttempt != null) { - currentApplicationAttemptId = lastAttempt.getApplicationAttemptId(); - trackingUrl = lastAttempt.getTrackingURL(); - host = lastAttempt.getHost(); - rpcPort = lastAttempt.getRPCPort(); + @Override + public Map getContainers( + ApplicationAttemptId appAttemptId) throws YarnException, IOException { + ApplicationReportExt app = 
getApplication( + appAttemptId.getApplicationId(), ApplicationReportField.USER_AND_ACLS); + checkAccess(app); + TimelineEntities entities = timelineDataManager.getEntities( + ContainerMetricsConstants.ENTITY_TYPE, + new NameValuePair( + ContainerMetricsConstants.PARENT_PRIMARIY_FILTER, + appAttemptId.toString()), null, null, null, + null, null, Long.MAX_VALUE, EnumSet.allOf(Field.class), + UserGroupInformation.getLoginUser()); + Map containers = + new HashMap(); + if (entities != null && entities.getEntities() != null) { + for (TimelineEntity entity : entities.getEntities()) { + ContainerReport container = convertToContainerReport( + entity, serverHttpAddress, app.appReport.getUser()); + containers.put(container.getContainerId(), container); + } } - return ApplicationReport.newInstance(appHistory.getApplicationId(), - currentApplicationAttemptId, appHistory.getUser(), appHistory.getQueue(), - appHistory.getApplicationName(), host, rpcPort, null, - appHistory.getYarnApplicationState(), appHistory.getDiagnosticsInfo(), - trackingUrl, appHistory.getStartTime(), appHistory.getFinishTime(), - appHistory.getFinalApplicationStatus(), null, "", 100, - appHistory.getApplicationType(), null); + return containers; } - private ApplicationAttemptHistoryData getLastAttempt(ApplicationId appId) - throws IOException { - Map attempts = - historyStore.getApplicationAttempts(appId); - ApplicationAttemptId prevMaxAttemptId = null; - for (ApplicationAttemptId attemptId : attempts.keySet()) { - if (prevMaxAttemptId == null) { - prevMaxAttemptId = attemptId; - } else { - if (prevMaxAttemptId.getAttemptId() < attemptId.getAttemptId()) { - prevMaxAttemptId = attemptId; + private static ApplicationReportExt convertToApplicationReport( + TimelineEntity entity, ApplicationReportField field) { + String user = null; + String queue = null; + String name = null; + String type = null; + long createdTime = 0; + long finishedTime = 0; + ApplicationAttemptId latestApplicationAttemptId = null; + String diagnosticsInfo = null; + FinalApplicationStatus finalStatus = FinalApplicationStatus.UNDEFINED; + YarnApplicationState state = null; + Map appViewACLs = + new HashMap(); + Map entityInfo = entity.getOtherInfo(); + if (entityInfo != null) { + if (entityInfo.containsKey(ApplicationMetricsConstants.USER_ENTITY_INFO)) { + user = + entityInfo.get(ApplicationMetricsConstants.USER_ENTITY_INFO) + .toString(); + } + if (entityInfo.containsKey(ApplicationMetricsConstants.APP_VIEW_ACLS_ENTITY_INFO)) { + String appViewACLsStr = entityInfo.get( + ApplicationMetricsConstants.APP_VIEW_ACLS_ENTITY_INFO).toString(); + if (appViewACLsStr.length() > 0) { + appViewACLs.put(ApplicationAccessType.VIEW_APP, appViewACLsStr); + } + } + if (field == ApplicationReportField.USER_AND_ACLS) { + return new ApplicationReportExt(ApplicationReport.newInstance( + ConverterUtils.toApplicationId(entity.getEntityId()), + latestApplicationAttemptId, user, queue, name, null, -1, null, state, + diagnosticsInfo, null, createdTime, finishedTime, finalStatus, null, + null, 1.0F, type, null), appViewACLs); + } + if (entityInfo.containsKey(ApplicationMetricsConstants.QUEUE_ENTITY_INFO)) { + queue = + entityInfo.get(ApplicationMetricsConstants.QUEUE_ENTITY_INFO) + .toString(); + } + if (entityInfo.containsKey(ApplicationMetricsConstants.NAME_ENTITY_INFO)) { + name = + entityInfo.get(ApplicationMetricsConstants.NAME_ENTITY_INFO) + .toString(); + } + if (entityInfo.containsKey(ApplicationMetricsConstants.TYPE_ENTITY_INFO)) { + type = + 
entityInfo.get(ApplicationMetricsConstants.TYPE_ENTITY_INFO) + .toString(); + } + } + List events = entity.getEvents(); + if (events != null) { + for (TimelineEvent event : events) { + if (event.getEventType().equals( + ApplicationMetricsConstants.CREATED_EVENT_TYPE)) { + createdTime = event.getTimestamp(); + } else if (event.getEventType().equals( + ApplicationMetricsConstants.FINISHED_EVENT_TYPE)) { + finishedTime = event.getTimestamp(); + Map eventInfo = event.getEventInfo(); + if (eventInfo == null) { + continue; + } + if (eventInfo + .containsKey(ApplicationMetricsConstants.LATEST_APP_ATTEMPT_EVENT_INFO)) { + latestApplicationAttemptId = + ConverterUtils + .toApplicationAttemptId( + eventInfo + .get( + ApplicationMetricsConstants.LATEST_APP_ATTEMPT_EVENT_INFO) + .toString()); + } + if (eventInfo + .containsKey(ApplicationMetricsConstants.DIAGNOSTICS_INFO_EVENT_INFO)) { + diagnosticsInfo = + eventInfo.get( + ApplicationMetricsConstants.DIAGNOSTICS_INFO_EVENT_INFO) + .toString(); + } + if (eventInfo + .containsKey(ApplicationMetricsConstants.FINAL_STATUS_EVENT_INFO)) { + finalStatus = + FinalApplicationStatus.valueOf(eventInfo.get( + ApplicationMetricsConstants.FINAL_STATUS_EVENT_INFO) + .toString()); + } + if (eventInfo + .containsKey(ApplicationMetricsConstants.STATE_EVENT_INFO)) { + state = + YarnApplicationState.valueOf(eventInfo.get( + ApplicationMetricsConstants.STATE_EVENT_INFO).toString()); + } } } } - return attempts.get(prevMaxAttemptId); + return new ApplicationReportExt(ApplicationReport.newInstance( + ConverterUtils.toApplicationId(entity.getEntityId()), + latestApplicationAttemptId, user, queue, name, null, -1, null, state, + diagnosticsInfo, null, createdTime, finishedTime, finalStatus, null, + null, 1.0F, type, null), appViewACLs); } - private ApplicationAttemptReport convertToApplicationAttemptReport( - ApplicationAttemptHistoryData appAttemptHistory) { + private static ApplicationAttemptReport convertToApplicationAttemptReport( + TimelineEntity entity) { + String host = null; + int rpcPort = -1; + ContainerId amContainerId = null; + String trackingUrl = null; + String originalTrackingUrl = null; + String diagnosticsInfo = null; + YarnApplicationAttemptState state = null; + List events = entity.getEvents(); + if (events != null) { + for (TimelineEvent event : events) { + if (event.getEventType().equals( + AppAttemptMetricsConstants.REGISTERED_EVENT_TYPE)) { + Map eventInfo = event.getEventInfo(); + if (eventInfo == null) { + continue; + } + if (eventInfo.containsKey(AppAttemptMetricsConstants.HOST_EVENT_INFO)) { + host = + eventInfo.get(AppAttemptMetricsConstants.HOST_EVENT_INFO) + .toString(); + } + if (eventInfo + .containsKey(AppAttemptMetricsConstants.RPC_PORT_EVENT_INFO)) { + rpcPort = (Integer) eventInfo.get( + AppAttemptMetricsConstants.RPC_PORT_EVENT_INFO); + } + if (eventInfo + .containsKey(AppAttemptMetricsConstants.MASTER_CONTAINER_EVENT_INFO)) { + amContainerId = + ConverterUtils.toContainerId(eventInfo.get( + AppAttemptMetricsConstants.MASTER_CONTAINER_EVENT_INFO) + .toString()); + } + } else if (event.getEventType().equals( + AppAttemptMetricsConstants.FINISHED_EVENT_TYPE)) { + Map eventInfo = event.getEventInfo(); + if (eventInfo == null) { + continue; + } + if (eventInfo + .containsKey(AppAttemptMetricsConstants.TRACKING_URL_EVENT_INFO)) { + trackingUrl = + eventInfo.get( + AppAttemptMetricsConstants.TRACKING_URL_EVENT_INFO) + .toString(); + } + if (eventInfo + .containsKey(AppAttemptMetricsConstants.ORIGINAL_TRACKING_URL_EVENT_INFO)) { + 
originalTrackingUrl = + eventInfo + .get( + AppAttemptMetricsConstants.ORIGINAL_TRACKING_URL_EVENT_INFO) + .toString(); + } + if (eventInfo + .containsKey(AppAttemptMetricsConstants.DIAGNOSTICS_INFO_EVENT_INFO)) { + diagnosticsInfo = + eventInfo.get( + AppAttemptMetricsConstants.DIAGNOSTICS_INFO_EVENT_INFO) + .toString(); + } + if (eventInfo + .containsKey(AppAttemptMetricsConstants.STATE_EVENT_INFO)) { + state = + YarnApplicationAttemptState.valueOf(eventInfo.get( + AppAttemptMetricsConstants.STATE_EVENT_INFO) + .toString()); + } + } + } + } return ApplicationAttemptReport.newInstance( - appAttemptHistory.getApplicationAttemptId(), appAttemptHistory.getHost(), - appAttemptHistory.getRPCPort(), appAttemptHistory.getTrackingURL(), null, - appAttemptHistory.getDiagnosticsInfo(), - appAttemptHistory.getYarnApplicationAttemptState(), - appAttemptHistory.getMasterContainerId()); - } - - @Override - public ApplicationAttemptReport getApplicationAttempt( - ApplicationAttemptId appAttemptId) throws IOException { - return convertToApplicationAttemptReport(historyStore - .getApplicationAttempt(appAttemptId)); + ConverterUtils.toApplicationAttemptId(entity.getEntityId()), + host, rpcPort, trackingUrl, originalTrackingUrl, diagnosticsInfo, + state, amContainerId); } - @Override - public Map - getApplicationAttempts(ApplicationId appId) throws IOException { - Map histData = - historyStore.getApplicationAttempts(appId); - HashMap applicationAttemptsReport = - new HashMap(); - for (Entry entry : histData - .entrySet()) { - applicationAttemptsReport.put(entry.getKey(), - convertToApplicationAttemptReport(entry.getValue())); + private static ContainerReport convertToContainerReport( + TimelineEntity entity, String serverHttpAddress, String user) { + int allocatedMem = 0; + int allocatedVcore = 0; + String allocatedHost = null; + int allocatedPort = -1; + int allocatedPriority = 0; + long createdTime = 0; + long finishedTime = 0; + String diagnosticsInfo = null; + int exitStatus = ContainerExitStatus.INVALID; + ContainerState state = null; + Map entityInfo = entity.getOtherInfo(); + if (entityInfo != null) { + if (entityInfo + .containsKey(ContainerMetricsConstants.ALLOCATED_MEMORY_ENTITY_INFO)) { + allocatedMem = (Integer) entityInfo.get( + ContainerMetricsConstants.ALLOCATED_MEMORY_ENTITY_INFO); + } + if (entityInfo + .containsKey(ContainerMetricsConstants.ALLOCATED_VCORE_ENTITY_INFO)) { + allocatedVcore = (Integer) entityInfo.get( + ContainerMetricsConstants.ALLOCATED_VCORE_ENTITY_INFO); + } + if (entityInfo + .containsKey(ContainerMetricsConstants.ALLOCATED_HOST_ENTITY_INFO)) { + allocatedHost = + entityInfo + .get(ContainerMetricsConstants.ALLOCATED_HOST_ENTITY_INFO) + .toString(); + } + if (entityInfo + .containsKey(ContainerMetricsConstants.ALLOCATED_PORT_ENTITY_INFO)) { + allocatedPort = (Integer) entityInfo.get( + ContainerMetricsConstants.ALLOCATED_PORT_ENTITY_INFO); + } + if (entityInfo + .containsKey(ContainerMetricsConstants.ALLOCATED_PRIORITY_ENTITY_INFO)) { + allocatedPriority = (Integer) entityInfo.get( + ContainerMetricsConstants.ALLOCATED_PRIORITY_ENTITY_INFO); + } } - return applicationAttemptsReport; - } - - @Override - public ContainerReport getContainer(ContainerId containerId) - throws IOException { - ApplicationReport app = - getApplication(containerId.getApplicationAttemptId().getApplicationId()); - return convertToContainerReport(historyStore.getContainer(containerId), - app == null ? 
null: app.getUser()); - } - - private ContainerReport convertToContainerReport( - ContainerHistoryData containerHistory, String user) { - // If the container has the aggregated log, add the server root url + List events = entity.getEvents(); + if (events != null) { + for (TimelineEvent event : events) { + if (event.getEventType().equals( + ContainerMetricsConstants.CREATED_EVENT_TYPE)) { + createdTime = event.getTimestamp(); + } else if (event.getEventType().equals( + ContainerMetricsConstants.FINISHED_EVENT_TYPE)) { + finishedTime = event.getTimestamp(); + Map eventInfo = event.getEventInfo(); + if (eventInfo == null) { + continue; + } + if (eventInfo + .containsKey(ContainerMetricsConstants.DIAGNOSTICS_INFO_EVENT_INFO)) { + diagnosticsInfo = + eventInfo.get( + ContainerMetricsConstants.DIAGNOSTICS_INFO_EVENT_INFO) + .toString(); + } + if (eventInfo + .containsKey(ContainerMetricsConstants.EXIT_STATUS_EVENT_INFO)) { + exitStatus = (Integer) eventInfo.get( + ContainerMetricsConstants.EXIT_STATUS_EVENT_INFO); + } + if (eventInfo + .containsKey(ContainerMetricsConstants.STATE_EVENT_INFO)) { + state = + ContainerState.valueOf(eventInfo.get( + ContainerMetricsConstants.STATE_EVENT_INFO).toString()); + } + } + } + } + NodeId allocatedNode = NodeId.newInstance(allocatedHost, allocatedPort); + ContainerId containerId = + ConverterUtils.toContainerId(entity.getEntityId()); String logUrl = WebAppUtils.getAggregatedLogURL( serverHttpAddress, - containerHistory.getAssignedNode().toString(), - containerHistory.getContainerId().toString(), - containerHistory.getContainerId().toString(), + allocatedNode.toString(), + containerId.toString(), + containerId.toString(), user); - return ContainerReport.newInstance(containerHistory.getContainerId(), - containerHistory.getAllocatedResource(), - containerHistory.getAssignedNode(), containerHistory.getPriority(), - containerHistory.getStartTime(), containerHistory.getFinishTime(), - containerHistory.getDiagnosticsInfo(), logUrl, - containerHistory.getContainerExitStatus(), - containerHistory.getContainerState()); + return ContainerReport.newInstance( + ConverterUtils.toContainerId(entity.getEntityId()), + Resource.newInstance(allocatedMem, allocatedVcore), + NodeId.newInstance(allocatedHost, allocatedPort), + Priority.newInstance(allocatedPriority), + createdTime, finishedTime, diagnosticsInfo, logUrl, exitStatus, state); } - @Override - public Map getContainers( - ApplicationAttemptId appAttemptId) throws IOException { - ApplicationReport app = - getApplication(appAttemptId.getApplicationId()); - Map histData = - historyStore.getContainers(appAttemptId); - HashMap containersReport = - new HashMap(); - for (Entry entry : histData.entrySet()) { - containersReport.put(entry.getKey(), - convertToContainerReport(entry.getValue(), - app == null ? 
null : app.getUser())); + private ApplicationReportExt generateApplicationReport(TimelineEntity entity, + ApplicationReportField field) throws YarnException, IOException { + ApplicationReportExt app = convertToApplicationReport(entity, field); + // If only user and acls are pulled to check attempt(s)/container(s) access + // control, we can return immediately + if (field == ApplicationReportField.USER_AND_ACLS) { + return app; + } + try { + checkAccess(app); + if (app.appReport.getCurrentApplicationAttemptId() != null) { + ApplicationAttemptReport appAttempt = + getApplicationAttempt(app.appReport.getCurrentApplicationAttemptId()); + if (appAttempt != null) { + app.appReport.setHost(appAttempt.getHost()); + app.appReport.setRpcPort(appAttempt.getRpcPort()); + app.appReport.setTrackingUrl(appAttempt.getTrackingUrl()); + app.appReport.setOriginalTrackingUrl(appAttempt.getOriginalTrackingUrl()); + } + } + } catch (YarnException e) { + // YarnExcetpion is thrown because the user doesn't have access + app.appReport.setDiagnostics(null); + app.appReport.setCurrentApplicationAttemptId(null); + } + if (app.appReport.getCurrentApplicationAttemptId() == null) { + app.appReport.setCurrentApplicationAttemptId( + ApplicationAttemptId.newInstance(app.appReport.getApplicationId(), -1)); + } + return app; + } + + private ApplicationReportExt getApplication(ApplicationId appId, + ApplicationReportField field) throws YarnException, IOException { + TimelineEntity entity = timelineDataManager.getEntity( + ApplicationMetricsConstants.ENTITY_TYPE, + appId.toString(), EnumSet.allOf(Field.class), + UserGroupInformation.getLoginUser()); + if (entity == null) { + throw new ApplicationNotFoundException("The entity for application " + + appId + " doesn't exist in the timeline store"); + } else { + return generateApplicationReport(entity, field); } - return containersReport; } - @Private - @VisibleForTesting - public ApplicationHistoryStore getHistoryStore() { - return this.historyStore; + private void checkAccess(ApplicationReportExt app) + throws YarnException, IOException { + if (app.appViewACLs != null) { + aclsManager.addApplication( + app.appReport.getApplicationId(), app.appViewACLs); + try { + if (!aclsManager.checkAccess(UserGroupInformation.getCurrentUser(), + ApplicationAccessType.VIEW_APP, app.appReport.getUser(), + app.appReport.getApplicationId())) { + throw new YarnException("User " + + UserGroupInformation.getCurrentUser().getShortUserName() + + " does not have privilage to see this application " + + app.appReport.getApplicationId()); + } + } finally { + aclsManager.removeApplication(app.appReport.getApplicationId()); + } + } + } + + private static enum ApplicationReportField { + ALL, // retrieve all the fields + USER_AND_ACLS // retrieve user and ACLs info only } + + private static class ApplicationReportExt { + private ApplicationReport appReport; + private Map appViewACLs; + + public ApplicationReportExt( + ApplicationReport appReport, + Map appViewACLs) { + this.appReport = appReport; + this.appViewACLs = appViewACLs; + } + } } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryManagerOnTimelineStore.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryManagerOnTimelineStore.java deleted file mode 100644 index 
5381bd6..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryManagerOnTimelineStore.java +++ /dev/null @@ -1,564 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.yarn.server.applicationhistoryservice; - -import java.io.IOException; -import java.util.EnumSet; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.security.UserGroupInformation; -import org.apache.hadoop.service.AbstractService; -import org.apache.hadoop.yarn.api.records.ApplicationAccessType; -import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; -import org.apache.hadoop.yarn.api.records.ApplicationAttemptReport; -import org.apache.hadoop.yarn.api.records.ApplicationId; -import org.apache.hadoop.yarn.api.records.ApplicationReport; -import org.apache.hadoop.yarn.api.records.ContainerExitStatus; -import org.apache.hadoop.yarn.api.records.ContainerId; -import org.apache.hadoop.yarn.api.records.ContainerReport; -import org.apache.hadoop.yarn.api.records.ContainerState; -import org.apache.hadoop.yarn.api.records.FinalApplicationStatus; -import org.apache.hadoop.yarn.api.records.NodeId; -import org.apache.hadoop.yarn.api.records.Priority; -import org.apache.hadoop.yarn.api.records.Resource; -import org.apache.hadoop.yarn.api.records.YarnApplicationAttemptState; -import org.apache.hadoop.yarn.api.records.YarnApplicationState; -import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities; -import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity; -import org.apache.hadoop.yarn.api.records.timeline.TimelineEvent; -import org.apache.hadoop.yarn.exceptions.ApplicationAttemptNotFoundException; -import org.apache.hadoop.yarn.exceptions.ApplicationNotFoundException; -import org.apache.hadoop.yarn.exceptions.ContainerNotFoundException; -import org.apache.hadoop.yarn.exceptions.YarnException; -import org.apache.hadoop.yarn.server.metrics.AppAttemptMetricsConstants; -import org.apache.hadoop.yarn.server.metrics.ApplicationMetricsConstants; -import org.apache.hadoop.yarn.server.metrics.ContainerMetricsConstants; -import org.apache.hadoop.yarn.server.security.ApplicationACLsManager; -import org.apache.hadoop.yarn.server.timeline.NameValuePair; -import org.apache.hadoop.yarn.server.timeline.TimelineDataManager; -import org.apache.hadoop.yarn.server.timeline.TimelineReader.Field; -import org.apache.hadoop.yarn.util.ConverterUtils; -import org.apache.hadoop.yarn.webapp.util.WebAppUtils; - -public class ApplicationHistoryManagerOnTimelineStore extends AbstractService - implements - 
ApplicationHistoryManager { - - private TimelineDataManager timelineDataManager; - private ApplicationACLsManager aclsManager; - private String serverHttpAddress; - - public ApplicationHistoryManagerOnTimelineStore( - TimelineDataManager timelineDataManager, - ApplicationACLsManager aclsManager) { - super(ApplicationHistoryManagerOnTimelineStore.class.getName()); - this.timelineDataManager = timelineDataManager; - this.aclsManager = aclsManager; - } - - @Override - protected void serviceInit(Configuration conf) throws Exception { - serverHttpAddress = WebAppUtils.getHttpSchemePrefix(conf) + - WebAppUtils.getAHSWebAppURLWithoutScheme(conf); - super.serviceInit(conf); - } - - @Override - public ApplicationReport getApplication(ApplicationId appId) - throws YarnException, IOException { - return getApplication(appId, ApplicationReportField.ALL).appReport; - } - - @Override - public Map getAllApplications() - throws YarnException, IOException { - TimelineEntities entities = timelineDataManager.getEntities( - ApplicationMetricsConstants.ENTITY_TYPE, null, null, null, null, - null, null, Long.MAX_VALUE, EnumSet.allOf(Field.class), - UserGroupInformation.getLoginUser()); - Map apps = - new HashMap(); - if (entities != null && entities.getEntities() != null) { - for (TimelineEntity entity : entities.getEntities()) { - ApplicationReportExt app = - generateApplicationReport(entity, ApplicationReportField.ALL); - apps.put(app.appReport.getApplicationId(), app.appReport); - } - } - return apps; - } - - @Override - public Map - getApplicationAttempts(ApplicationId appId) - throws YarnException, IOException { - ApplicationReportExt app = getApplication( - appId, ApplicationReportField.USER_AND_ACLS); - checkAccess(app); - TimelineEntities entities = timelineDataManager.getEntities( - AppAttemptMetricsConstants.ENTITY_TYPE, - new NameValuePair( - AppAttemptMetricsConstants.PARENT_PRIMARY_FILTER, appId - .toString()), null, null, null, null, null, - Long.MAX_VALUE, EnumSet.allOf(Field.class), - UserGroupInformation.getLoginUser()); - Map appAttempts = - new HashMap(); - for (TimelineEntity entity : entities.getEntities()) { - ApplicationAttemptReport appAttempt = - convertToApplicationAttemptReport(entity); - appAttempts.put(appAttempt.getApplicationAttemptId(), appAttempt); - } - return appAttempts; - } - - @Override - public ApplicationAttemptReport getApplicationAttempt( - ApplicationAttemptId appAttemptId) throws YarnException, IOException { - ApplicationReportExt app = getApplication( - appAttemptId.getApplicationId(), ApplicationReportField.USER_AND_ACLS); - checkAccess(app); - TimelineEntity entity = timelineDataManager.getEntity( - AppAttemptMetricsConstants.ENTITY_TYPE, - appAttemptId.toString(), EnumSet.allOf(Field.class), - UserGroupInformation.getLoginUser()); - if (entity == null) { - throw new ApplicationAttemptNotFoundException( - "The entity for application attempt " + appAttemptId + - " doesn't exist in the timeline store"); - } else { - return convertToApplicationAttemptReport(entity); - } - } - - @Override - public ContainerReport getContainer(ContainerId containerId) - throws YarnException, IOException { - ApplicationReportExt app = getApplication( - containerId.getApplicationAttemptId().getApplicationId(), - ApplicationReportField.USER_AND_ACLS); - checkAccess(app); - TimelineEntity entity = timelineDataManager.getEntity( - ContainerMetricsConstants.ENTITY_TYPE, - containerId.toString(), EnumSet.allOf(Field.class), - UserGroupInformation.getLoginUser()); - if (entity == null) { - throw 
new ContainerNotFoundException( - "The entity for container " + containerId + - " doesn't exist in the timeline store"); - } else { - return convertToContainerReport( - entity, serverHttpAddress, app.appReport.getUser()); - } - } - - @Override - public ContainerReport getAMContainer(ApplicationAttemptId appAttemptId) - throws YarnException, IOException { - ApplicationAttemptReport appAttempt = getApplicationAttempt(appAttemptId); - return getContainer(appAttempt.getAMContainerId()); - } - - @Override - public Map getContainers( - ApplicationAttemptId appAttemptId) throws YarnException, IOException { - ApplicationReportExt app = getApplication( - appAttemptId.getApplicationId(), ApplicationReportField.USER_AND_ACLS); - checkAccess(app); - TimelineEntities entities = timelineDataManager.getEntities( - ContainerMetricsConstants.ENTITY_TYPE, - new NameValuePair( - ContainerMetricsConstants.PARENT_PRIMARIY_FILTER, - appAttemptId.toString()), null, null, null, - null, null, Long.MAX_VALUE, EnumSet.allOf(Field.class), - UserGroupInformation.getLoginUser()); - Map containers = - new HashMap(); - if (entities != null && entities.getEntities() != null) { - for (TimelineEntity entity : entities.getEntities()) { - ContainerReport container = convertToContainerReport( - entity, serverHttpAddress, app.appReport.getUser()); - containers.put(container.getContainerId(), container); - } - } - return containers; - } - - private static ApplicationReportExt convertToApplicationReport( - TimelineEntity entity, ApplicationReportField field) { - String user = null; - String queue = null; - String name = null; - String type = null; - long createdTime = 0; - long finishedTime = 0; - ApplicationAttemptId latestApplicationAttemptId = null; - String diagnosticsInfo = null; - FinalApplicationStatus finalStatus = FinalApplicationStatus.UNDEFINED; - YarnApplicationState state = null; - Map appViewACLs = - new HashMap(); - Map entityInfo = entity.getOtherInfo(); - if (entityInfo != null) { - if (entityInfo.containsKey(ApplicationMetricsConstants.USER_ENTITY_INFO)) { - user = - entityInfo.get(ApplicationMetricsConstants.USER_ENTITY_INFO) - .toString(); - } - if (entityInfo.containsKey(ApplicationMetricsConstants.APP_VIEW_ACLS_ENTITY_INFO)) { - String appViewACLsStr = entityInfo.get( - ApplicationMetricsConstants.APP_VIEW_ACLS_ENTITY_INFO).toString(); - if (appViewACLsStr.length() > 0) { - appViewACLs.put(ApplicationAccessType.VIEW_APP, appViewACLsStr); - } - } - if (field == ApplicationReportField.USER_AND_ACLS) { - return new ApplicationReportExt(ApplicationReport.newInstance( - ConverterUtils.toApplicationId(entity.getEntityId()), - latestApplicationAttemptId, user, queue, name, null, -1, null, state, - diagnosticsInfo, null, createdTime, finishedTime, finalStatus, null, - null, 1.0F, type, null), appViewACLs); - } - if (entityInfo.containsKey(ApplicationMetricsConstants.QUEUE_ENTITY_INFO)) { - queue = - entityInfo.get(ApplicationMetricsConstants.QUEUE_ENTITY_INFO) - .toString(); - } - if (entityInfo.containsKey(ApplicationMetricsConstants.NAME_ENTITY_INFO)) { - name = - entityInfo.get(ApplicationMetricsConstants.NAME_ENTITY_INFO) - .toString(); - } - if (entityInfo.containsKey(ApplicationMetricsConstants.TYPE_ENTITY_INFO)) { - type = - entityInfo.get(ApplicationMetricsConstants.TYPE_ENTITY_INFO) - .toString(); - } - } - List events = entity.getEvents(); - if (events != null) { - for (TimelineEvent event : events) { - if (event.getEventType().equals( - ApplicationMetricsConstants.CREATED_EVENT_TYPE)) { - createdTime = 
event.getTimestamp(); - } else if (event.getEventType().equals( - ApplicationMetricsConstants.FINISHED_EVENT_TYPE)) { - finishedTime = event.getTimestamp(); - Map eventInfo = event.getEventInfo(); - if (eventInfo == null) { - continue; - } - if (eventInfo - .containsKey(ApplicationMetricsConstants.LATEST_APP_ATTEMPT_EVENT_INFO)) { - latestApplicationAttemptId = - ConverterUtils - .toApplicationAttemptId( - eventInfo - .get( - ApplicationMetricsConstants.LATEST_APP_ATTEMPT_EVENT_INFO) - .toString()); - } - if (eventInfo - .containsKey(ApplicationMetricsConstants.DIAGNOSTICS_INFO_EVENT_INFO)) { - diagnosticsInfo = - eventInfo.get( - ApplicationMetricsConstants.DIAGNOSTICS_INFO_EVENT_INFO) - .toString(); - } - if (eventInfo - .containsKey(ApplicationMetricsConstants.FINAL_STATUS_EVENT_INFO)) { - finalStatus = - FinalApplicationStatus.valueOf(eventInfo.get( - ApplicationMetricsConstants.FINAL_STATUS_EVENT_INFO) - .toString()); - } - if (eventInfo - .containsKey(ApplicationMetricsConstants.STATE_EVENT_INFO)) { - state = - YarnApplicationState.valueOf(eventInfo.get( - ApplicationMetricsConstants.STATE_EVENT_INFO).toString()); - } - } - } - } - return new ApplicationReportExt(ApplicationReport.newInstance( - ConverterUtils.toApplicationId(entity.getEntityId()), - latestApplicationAttemptId, user, queue, name, null, -1, null, state, - diagnosticsInfo, null, createdTime, finishedTime, finalStatus, null, - null, 1.0F, type, null), appViewACLs); - } - - private static ApplicationAttemptReport convertToApplicationAttemptReport( - TimelineEntity entity) { - String host = null; - int rpcPort = -1; - ContainerId amContainerId = null; - String trackingUrl = null; - String originalTrackingUrl = null; - String diagnosticsInfo = null; - YarnApplicationAttemptState state = null; - List events = entity.getEvents(); - if (events != null) { - for (TimelineEvent event : events) { - if (event.getEventType().equals( - AppAttemptMetricsConstants.REGISTERED_EVENT_TYPE)) { - Map eventInfo = event.getEventInfo(); - if (eventInfo == null) { - continue; - } - if (eventInfo.containsKey(AppAttemptMetricsConstants.HOST_EVENT_INFO)) { - host = - eventInfo.get(AppAttemptMetricsConstants.HOST_EVENT_INFO) - .toString(); - } - if (eventInfo - .containsKey(AppAttemptMetricsConstants.RPC_PORT_EVENT_INFO)) { - rpcPort = (Integer) eventInfo.get( - AppAttemptMetricsConstants.RPC_PORT_EVENT_INFO); - } - if (eventInfo - .containsKey(AppAttemptMetricsConstants.MASTER_CONTAINER_EVENT_INFO)) { - amContainerId = - ConverterUtils.toContainerId(eventInfo.get( - AppAttemptMetricsConstants.MASTER_CONTAINER_EVENT_INFO) - .toString()); - } - } else if (event.getEventType().equals( - AppAttemptMetricsConstants.FINISHED_EVENT_TYPE)) { - Map eventInfo = event.getEventInfo(); - if (eventInfo == null) { - continue; - } - if (eventInfo - .containsKey(AppAttemptMetricsConstants.TRACKING_URL_EVENT_INFO)) { - trackingUrl = - eventInfo.get( - AppAttemptMetricsConstants.TRACKING_URL_EVENT_INFO) - .toString(); - } - if (eventInfo - .containsKey(AppAttemptMetricsConstants.ORIGINAL_TRACKING_URL_EVENT_INFO)) { - originalTrackingUrl = - eventInfo - .get( - AppAttemptMetricsConstants.ORIGINAL_TRACKING_URL_EVENT_INFO) - .toString(); - } - if (eventInfo - .containsKey(AppAttemptMetricsConstants.DIAGNOSTICS_INFO_EVENT_INFO)) { - diagnosticsInfo = - eventInfo.get( - AppAttemptMetricsConstants.DIAGNOSTICS_INFO_EVENT_INFO) - .toString(); - } - if (eventInfo - .containsKey(AppAttemptMetricsConstants.STATE_EVENT_INFO)) { - state = - 
YarnApplicationAttemptState.valueOf(eventInfo.get( - AppAttemptMetricsConstants.STATE_EVENT_INFO) - .toString()); - } - } - } - } - return ApplicationAttemptReport.newInstance( - ConverterUtils.toApplicationAttemptId(entity.getEntityId()), - host, rpcPort, trackingUrl, originalTrackingUrl, diagnosticsInfo, - state, amContainerId); - } - - private static ContainerReport convertToContainerReport( - TimelineEntity entity, String serverHttpAddress, String user) { - int allocatedMem = 0; - int allocatedVcore = 0; - String allocatedHost = null; - int allocatedPort = -1; - int allocatedPriority = 0; - long createdTime = 0; - long finishedTime = 0; - String diagnosticsInfo = null; - int exitStatus = ContainerExitStatus.INVALID; - ContainerState state = null; - Map entityInfo = entity.getOtherInfo(); - if (entityInfo != null) { - if (entityInfo - .containsKey(ContainerMetricsConstants.ALLOCATED_MEMORY_ENTITY_INFO)) { - allocatedMem = (Integer) entityInfo.get( - ContainerMetricsConstants.ALLOCATED_MEMORY_ENTITY_INFO); - } - if (entityInfo - .containsKey(ContainerMetricsConstants.ALLOCATED_VCORE_ENTITY_INFO)) { - allocatedVcore = (Integer) entityInfo.get( - ContainerMetricsConstants.ALLOCATED_VCORE_ENTITY_INFO); - } - if (entityInfo - .containsKey(ContainerMetricsConstants.ALLOCATED_HOST_ENTITY_INFO)) { - allocatedHost = - entityInfo - .get(ContainerMetricsConstants.ALLOCATED_HOST_ENTITY_INFO) - .toString(); - } - if (entityInfo - .containsKey(ContainerMetricsConstants.ALLOCATED_PORT_ENTITY_INFO)) { - allocatedPort = (Integer) entityInfo.get( - ContainerMetricsConstants.ALLOCATED_PORT_ENTITY_INFO); - } - if (entityInfo - .containsKey(ContainerMetricsConstants.ALLOCATED_PRIORITY_ENTITY_INFO)) { - allocatedPriority = (Integer) entityInfo.get( - ContainerMetricsConstants.ALLOCATED_PRIORITY_ENTITY_INFO); - } - } - List events = entity.getEvents(); - if (events != null) { - for (TimelineEvent event : events) { - if (event.getEventType().equals( - ContainerMetricsConstants.CREATED_EVENT_TYPE)) { - createdTime = event.getTimestamp(); - } else if (event.getEventType().equals( - ContainerMetricsConstants.FINISHED_EVENT_TYPE)) { - finishedTime = event.getTimestamp(); - Map eventInfo = event.getEventInfo(); - if (eventInfo == null) { - continue; - } - if (eventInfo - .containsKey(ContainerMetricsConstants.DIAGNOSTICS_INFO_EVENT_INFO)) { - diagnosticsInfo = - eventInfo.get( - ContainerMetricsConstants.DIAGNOSTICS_INFO_EVENT_INFO) - .toString(); - } - if (eventInfo - .containsKey(ContainerMetricsConstants.EXIT_STATUS_EVENT_INFO)) { - exitStatus = (Integer) eventInfo.get( - ContainerMetricsConstants.EXIT_STATUS_EVENT_INFO); - } - if (eventInfo - .containsKey(ContainerMetricsConstants.STATE_EVENT_INFO)) { - state = - ContainerState.valueOf(eventInfo.get( - ContainerMetricsConstants.STATE_EVENT_INFO).toString()); - } - } - } - } - NodeId allocatedNode = NodeId.newInstance(allocatedHost, allocatedPort); - ContainerId containerId = - ConverterUtils.toContainerId(entity.getEntityId()); - String logUrl = WebAppUtils.getAggregatedLogURL( - serverHttpAddress, - allocatedNode.toString(), - containerId.toString(), - containerId.toString(), - user); - return ContainerReport.newInstance( - ConverterUtils.toContainerId(entity.getEntityId()), - Resource.newInstance(allocatedMem, allocatedVcore), - NodeId.newInstance(allocatedHost, allocatedPort), - Priority.newInstance(allocatedPriority), - createdTime, finishedTime, diagnosticsInfo, logUrl, exitStatus, state); - } - - private ApplicationReportExt 
generateApplicationReport(TimelineEntity entity, - ApplicationReportField field) throws YarnException, IOException { - ApplicationReportExt app = convertToApplicationReport(entity, field); - // If only user and acls are pulled to check attempt(s)/container(s) access - // control, we can return immediately - if (field == ApplicationReportField.USER_AND_ACLS) { - return app; - } - try { - checkAccess(app); - if (app.appReport.getCurrentApplicationAttemptId() != null) { - ApplicationAttemptReport appAttempt = - getApplicationAttempt(app.appReport.getCurrentApplicationAttemptId()); - if (appAttempt != null) { - app.appReport.setHost(appAttempt.getHost()); - app.appReport.setRpcPort(appAttempt.getRpcPort()); - app.appReport.setTrackingUrl(appAttempt.getTrackingUrl()); - app.appReport.setOriginalTrackingUrl(appAttempt.getOriginalTrackingUrl()); - } - } - } catch (YarnException e) { - // YarnExcetpion is thrown because the user doesn't have access - app.appReport.setDiagnostics(null); - app.appReport.setCurrentApplicationAttemptId(null); - } - if (app.appReport.getCurrentApplicationAttemptId() == null) { - app.appReport.setCurrentApplicationAttemptId( - ApplicationAttemptId.newInstance(app.appReport.getApplicationId(), -1)); - } - return app; - } - - private ApplicationReportExt getApplication(ApplicationId appId, - ApplicationReportField field) throws YarnException, IOException { - TimelineEntity entity = timelineDataManager.getEntity( - ApplicationMetricsConstants.ENTITY_TYPE, - appId.toString(), EnumSet.allOf(Field.class), - UserGroupInformation.getLoginUser()); - if (entity == null) { - throw new ApplicationNotFoundException("The entity for application " + - appId + " doesn't exist in the timeline store"); - } else { - return generateApplicationReport(entity, field); - } - } - - private void checkAccess(ApplicationReportExt app) - throws YarnException, IOException { - if (app.appViewACLs != null) { - aclsManager.addApplication( - app.appReport.getApplicationId(), app.appViewACLs); - try { - if (!aclsManager.checkAccess(UserGroupInformation.getCurrentUser(), - ApplicationAccessType.VIEW_APP, app.appReport.getUser(), - app.appReport.getApplicationId())) { - throw new YarnException("User " - + UserGroupInformation.getCurrentUser().getShortUserName() - + " does not have privilage to see this application " - + app.appReport.getApplicationId()); - } - } finally { - aclsManager.removeApplication(app.appReport.getApplicationId()); - } - } - } - - private static enum ApplicationReportField { - ALL, // retrieve all the fields - USER_AND_ACLS // retrieve user and ACLs info only - } - - private static class ApplicationReportExt { - private ApplicationReport appReport; - private Map appViewACLs; - - public ApplicationReportExt( - ApplicationReport appReport, - Map appViewACLs) { - this.appReport = appReport; - this.appViewACLs = appViewACLs; - } - } -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryReader.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryReader.java deleted file mode 100644 index 590853a..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryReader.java +++ /dev/null 
@@ -1,117 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.yarn.server.applicationhistoryservice; - -import java.io.IOException; -import java.util.Map; - -import org.apache.hadoop.classification.InterfaceAudience; -import org.apache.hadoop.classification.InterfaceStability; -import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; -import org.apache.hadoop.yarn.api.records.ApplicationId; -import org.apache.hadoop.yarn.api.records.ContainerId; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptHistoryData; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationHistoryData; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerHistoryData; - -@InterfaceAudience.Public -@InterfaceStability.Unstable -public interface ApplicationHistoryReader { - - /** - * This method returns Application {@link ApplicationHistoryData} for the - * specified {@link ApplicationId}. - * - * @param appId - * - * @return {@link ApplicationHistoryData} for the ApplicationId. - * @throws IOException - */ - ApplicationHistoryData getApplication(ApplicationId appId) throws IOException; - - /** - * This method returns all Application {@link ApplicationHistoryData}s - * - * @return map of {@link ApplicationId} to {@link ApplicationHistoryData}s. - * @throws IOException - */ - Map getAllApplications() - throws IOException; - - /** - * Application can have multiple application attempts - * {@link ApplicationAttemptHistoryData}. This method returns the all - * {@link ApplicationAttemptHistoryData}s for the Application. - * - * @param appId - * - * @return all {@link ApplicationAttemptHistoryData}s for the Application. - * @throws IOException - */ - Map - getApplicationAttempts(ApplicationId appId) throws IOException; - - /** - * This method returns {@link ApplicationAttemptHistoryData} for specified - * {@link ApplicationId}. - * - * @param appAttemptId - * {@link ApplicationAttemptId} - * @return {@link ApplicationAttemptHistoryData} for ApplicationAttemptId - * @throws IOException - */ - ApplicationAttemptHistoryData getApplicationAttempt( - ApplicationAttemptId appAttemptId) throws IOException; - - /** - * This method returns {@link ContainerHistoryData} for specified - * {@link ContainerId}. - * - * @param containerId - * {@link ContainerId} - * @return {@link ContainerHistoryData} for ContainerId - * @throws IOException - */ - ContainerHistoryData getContainer(ContainerId containerId) throws IOException; - - /** - * This method returns {@link ContainerHistoryData} for specified - * {@link ApplicationAttemptId}. 
- * - * @param appAttemptId - * {@link ApplicationAttemptId} - * @return {@link ContainerHistoryData} for ApplicationAttemptId - * @throws IOException - */ - ContainerHistoryData getAMContainer(ApplicationAttemptId appAttemptId) - throws IOException; - - /** - * This method returns Map{@link ContainerId} to {@link ContainerHistoryData} - * for specified {@link ApplicationAttemptId}. - * - * @param appAttemptId - * {@link ApplicationAttemptId} - * @return Map{@link ContainerId} to {@link ContainerHistoryData} for - * ApplicationAttemptId - * @throws IOException - */ - Map getContainers( - ApplicationAttemptId appAttemptId) throws IOException; -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java index 87761f1..2366e08 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java @@ -179,19 +179,7 @@ private ApplicationACLsManager createApplicationACLsManager( private ApplicationHistoryManager createApplicationHistoryManager( Configuration conf) { - // Backward compatibility: - // APPLICATION_HISTORY_STORE is neither null nor empty, it means that the - // user has enabled it explicitly. - if (conf.get(YarnConfiguration.APPLICATION_HISTORY_STORE) == null || - conf.get(YarnConfiguration.APPLICATION_HISTORY_STORE).length() == 0 || - conf.get(YarnConfiguration.APPLICATION_HISTORY_STORE).equals( - NullApplicationHistoryStore.class.getName())) { - return new ApplicationHistoryManagerOnTimelineStore( - timelineDataManager, aclsManager); - } else { - LOG.warn("The filesystem based application history store is deprecated."); - return new ApplicationHistoryManagerImpl(); - } + return new ApplicationHistoryManagerImpl(timelineDataManager, aclsManager); } private TimelineStore createTimelineStore( diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryStore.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryStore.java deleted file mode 100644 index c26faef..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryStore.java +++ /dev/null @@ -1,37 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.yarn.server.applicationhistoryservice; - -import org.apache.hadoop.classification.InterfaceAudience; -import org.apache.hadoop.classification.InterfaceStability; -import org.apache.hadoop.service.Service; - -/** - * This class is the abstract of the storage of the application history data. It - * is a {@link Service}, such that the implementation of this class can make use - * of the service life cycle to initialize and cleanup the storage. Users can - * access the storage via {@link ApplicationHistoryReader} and - * {@link ApplicationHistoryWriter} interfaces. - * - */ -@InterfaceAudience.Public -@InterfaceStability.Unstable -public interface ApplicationHistoryStore extends Service, - ApplicationHistoryReader, ApplicationHistoryWriter { -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryWriter.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryWriter.java deleted file mode 100644 index 09ba36d..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryWriter.java +++ /dev/null @@ -1,112 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
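Because ApplicationHistoryStore extends Service as well as the reader and writer interfaces, its owner drives storage setup and teardown through the standard service lifecycle rather than bespoke open/close methods. A minimal driver sketch, assuming the store classes removed by this patch are still on the classpath:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryStore;
import org.apache.hadoop.yarn.server.applicationhistoryservice.MemoryApplicationHistoryStore;

// Illustrative driver: init()/start()/stop() come from the Service contract,
// so the backing storage is opened and released through the same lifecycle
// the rest of the YARN daemons already use.
public class StoreLifecycleDemo {
  public static void main(String[] args) throws Exception {
    Configuration conf = new YarnConfiguration();
    ApplicationHistoryStore store = new MemoryApplicationHistoryStore();
    store.init(conf);   // serviceInit: pick up configuration
    store.start();      // serviceStart: open the backing storage
    try {
      // reader/writer calls (getApplication, applicationStarted, ...) go here
    } finally {
      store.stop();     // serviceStop: flush and release the backing storage
    }
  }
}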
- */ - -package org.apache.hadoop.yarn.server.applicationhistoryservice; - -import java.io.IOException; - -import org.apache.hadoop.classification.InterfaceAudience.Private; -import org.apache.hadoop.classification.InterfaceStability.Unstable; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptFinishData; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptStartData; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationFinishData; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationStartData; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerFinishData; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerStartData; - -/** - * It is the interface of writing the application history, exposing the methods - * of writing {@link ApplicationStartData}, {@link ApplicationFinishData} - * {@link ApplicationAttemptStartData}, {@link ApplicationAttemptFinishData}, - * {@link ContainerStartData} and {@link ContainerFinishData}. - */ -@Private -@Unstable -public interface ApplicationHistoryWriter { - - /** - * This method writes the information of RMApp that is available - * when it starts. - * - * @param appStart - * the record of the information of RMApp that is - * available when it starts - * @throws IOException - */ - void applicationStarted(ApplicationStartData appStart) throws IOException; - - /** - * This method writes the information of RMApp that is available - * when it finishes. - * - * @param appFinish - * the record of the information of RMApp that is - * available when it finishes - * @throws IOException - */ - void applicationFinished(ApplicationFinishData appFinish) throws IOException; - - /** - * This method writes the information of RMAppAttempt that is - * available when it starts. - * - * @param appAttemptStart - * the record of the information of RMAppAttempt that is - * available when it starts - * @throws IOException - */ - void applicationAttemptStarted(ApplicationAttemptStartData appAttemptStart) - throws IOException; - - /** - * This method writes the information of RMAppAttempt that is - * available when it finishes. - * - * @param appAttemptFinish - * the record of the information of RMAppAttempt that is - * available when it finishes - * @throws IOException - */ - void - applicationAttemptFinished(ApplicationAttemptFinishData appAttemptFinish) - throws IOException; - - /** - * This method writes the information of RMContainer that is - * available when it starts. - * - * @param containerStart - * the record of the information of RMContainer that is - * available when it starts - * @throws IOException - */ - void containerStarted(ContainerStartData containerStart) throws IOException; - - /** - * This method writes the information of RMContainer that is - * available when it finishes. 
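The writer methods above are meant to be invoked in lifecycle order: the application-level start record first, the application-level finish record last, with attempt and container events in between. A hedged sketch of that ordering; the recordLifecycle() helper and its pre-built record parameters are hypothetical, only the writer calls themselves come from the interface:

import java.io.IOException;

import org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryWriter;
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptFinishData;
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptStartData;
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationFinishData;
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationStartData;
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerFinishData;
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerStartData;

// Hypothetical helper showing the expected call order on the writer.
final class LifecycleRecorder {
  private LifecycleRecorder() {
  }

  static void recordLifecycle(ApplicationHistoryWriter writer,
      ApplicationStartData appStart, ApplicationAttemptStartData attemptStart,
      ContainerStartData amStart, ContainerFinishData amFinish,
      ApplicationAttemptFinishData attemptFinish, ApplicationFinishData appFinish)
      throws IOException {
    writer.applicationStarted(appStart);            // first: opens per-app state
    writer.applicationAttemptStarted(attemptStart);
    writer.containerStarted(amStart);
    writer.containerFinished(amFinish);
    writer.applicationAttemptFinished(attemptFinish);
    writer.applicationFinished(appFinish);          // last: finalizes per-app state
  }
}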
- * - * @param containerFinish - * the record of the information of RMContainer that is - * available when it finishes - * @throws IOException - */ - void containerFinished(ContainerFinishData containerFinish) - throws IOException; - -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/FileSystemApplicationHistoryStore.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/FileSystemApplicationHistoryStore.java deleted file mode 100644 index 6d76864..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/FileSystemApplicationHistoryStore.java +++ /dev/null @@ -1,795 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.yarn.server.applicationhistoryservice; - -import java.io.DataInput; -import java.io.DataInputStream; -import java.io.DataOutput; -import java.io.DataOutputStream; -import java.io.IOException; -import java.util.HashMap; -import java.util.Map; -import java.util.Map.Entry; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ConcurrentMap; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.classification.InterfaceAudience.Public; -import org.apache.hadoop.classification.InterfaceStability.Unstable; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.FSDataInputStream; -import org.apache.hadoop.fs.FSDataOutputStream; -import org.apache.hadoop.fs.FileStatus; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.fs.permission.FsPermission; -import org.apache.hadoop.io.IOUtils; -import org.apache.hadoop.io.Writable; -import org.apache.hadoop.io.file.tfile.TFile; -import org.apache.hadoop.service.AbstractService; -import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; -import org.apache.hadoop.yarn.api.records.ApplicationId; -import org.apache.hadoop.yarn.api.records.ContainerId; -import org.apache.hadoop.yarn.api.records.FinalApplicationStatus; -import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptFinishDataProto; -import org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptStartDataProto; -import org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationFinishDataProto; -import 
org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationStartDataProto; -import org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerFinishDataProto; -import org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerStartDataProto; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptFinishData; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptHistoryData; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptStartData; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationFinishData; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationHistoryData; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationStartData; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerFinishData; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerHistoryData; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerStartData; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.impl.pb.ApplicationAttemptFinishDataPBImpl; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.impl.pb.ApplicationAttemptStartDataPBImpl; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.impl.pb.ApplicationFinishDataPBImpl; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.impl.pb.ApplicationStartDataPBImpl; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.impl.pb.ContainerFinishDataPBImpl; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.impl.pb.ContainerStartDataPBImpl; -import org.apache.hadoop.yarn.util.ConverterUtils; - -import com.google.protobuf.InvalidProtocolBufferException; - -/** - * File system implementation of {@link ApplicationHistoryStore}. In this - * implementation, one application will have just one file in the file system, - * which contains all the history data of one application, and its attempts and - * containers. {@link #applicationStarted(ApplicationStartData)} is supposed to - * be invoked first when writing any history data of one application and it will - * open a file, while {@link #applicationFinished(ApplicationFinishData)} is - * supposed to be last writing operation and will close the file. 
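The class comment above describes the on-disk model: one TFile per application under the history root, holding every record for that application and its attempts and containers. The sketch below only prints the layout implied by that scheme; the root path is illustrative, and the "_start"/"_finish" key suffixes match the constants defined further down in this file:

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerId;

// Prints the file name and TFile keys the store would use for an application
// with one attempt and one (AM) container. Values stored under these keys are
// the serialized protobuf records.
public class HistoryLayoutSketch {
  public static void main(String[] args) {
    // Illustrative root; the real store resolves it from configuration and
    // appends the ApplicationHistoryDataRoot directory.
    Path root = new Path("/yarn/timeline/generic-history/ApplicationHistoryDataRoot");
    ApplicationId appId = ApplicationId.newInstance(System.currentTimeMillis(), 1);
    ApplicationAttemptId attemptId = ApplicationAttemptId.newInstance(appId, 1);
    ContainerId amContainer = ContainerId.newContainerId(attemptId, 1);

    System.out.println("history file: " + new Path(root, appId.toString()));
    for (String id : new String[] {
        appId.toString(), attemptId.toString(), amContainer.toString() }) {
      System.out.println("  key: " + id + "_start");
      System.out.println("  key: " + id + "_finish");
    }
  }
}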
- */ -@Public -@Unstable -public class FileSystemApplicationHistoryStore extends AbstractService - implements ApplicationHistoryStore { - - private static final Log LOG = LogFactory - .getLog(FileSystemApplicationHistoryStore.class); - - private static final String ROOT_DIR_NAME = "ApplicationHistoryDataRoot"; - private static final int MIN_BLOCK_SIZE = 256 * 1024; - private static final String START_DATA_SUFFIX = "_start"; - private static final String FINISH_DATA_SUFFIX = "_finish"; - private static final FsPermission ROOT_DIR_UMASK = FsPermission - .createImmutable((short) 0740); - private static final FsPermission HISTORY_FILE_UMASK = FsPermission - .createImmutable((short) 0640); - - private FileSystem fs; - private Path rootDirPath; - - private ConcurrentMap outstandingWriters = - new ConcurrentHashMap(); - - public FileSystemApplicationHistoryStore() { - super(FileSystemApplicationHistoryStore.class.getName()); - } - - protected FileSystem getFileSystem(Path path, Configuration conf) throws Exception { - return path.getFileSystem(conf); - } - - @Override - public void serviceStart() throws Exception { - Configuration conf = getConfig(); - Path fsWorkingPath = - new Path(conf.get(YarnConfiguration.FS_APPLICATION_HISTORY_STORE_URI, - conf.get("hadoop.tmp.dir") + "/yarn/timeline/generic-history")); - rootDirPath = new Path(fsWorkingPath, ROOT_DIR_NAME); - try { - fs = getFileSystem(fsWorkingPath, conf); - - if (!fs.isDirectory(rootDirPath)) { - fs.mkdirs(rootDirPath); - fs.setPermission(rootDirPath, ROOT_DIR_UMASK); - } - - } catch (IOException e) { - LOG.error("Error when initializing FileSystemHistoryStorage", e); - throw e; - } - super.serviceStart(); - } - - @Override - public void serviceStop() throws Exception { - try { - for (Entry entry : outstandingWriters - .entrySet()) { - entry.getValue().close(); - } - outstandingWriters.clear(); - } finally { - IOUtils.cleanup(LOG, fs); - } - super.serviceStop(); - } - - @Override - public ApplicationHistoryData getApplication(ApplicationId appId) - throws IOException { - HistoryFileReader hfReader = getHistoryFileReader(appId); - try { - boolean readStartData = false; - boolean readFinishData = false; - ApplicationHistoryData historyData = - ApplicationHistoryData.newInstance(appId, null, null, null, null, - Long.MIN_VALUE, Long.MIN_VALUE, Long.MAX_VALUE, null, - FinalApplicationStatus.UNDEFINED, null); - while ((!readStartData || !readFinishData) && hfReader.hasNext()) { - HistoryFileReader.Entry entry = hfReader.next(); - if (entry.key.id.equals(appId.toString())) { - if (entry.key.suffix.equals(START_DATA_SUFFIX)) { - ApplicationStartData startData = - parseApplicationStartData(entry.value); - mergeApplicationHistoryData(historyData, startData); - readStartData = true; - } else if (entry.key.suffix.equals(FINISH_DATA_SUFFIX)) { - ApplicationFinishData finishData = - parseApplicationFinishData(entry.value); - mergeApplicationHistoryData(historyData, finishData); - readFinishData = true; - } - } - } - if (!readStartData && !readFinishData) { - return null; - } - if (!readStartData) { - LOG.warn("Start information is missing for application " + appId); - } - if (!readFinishData) { - LOG.warn("Finish information is missing for application " + appId); - } - LOG.info("Completed reading history information of application " + appId); - return historyData; - } catch (IOException e) { - LOG.error("Error when reading history file of application " + appId, e); - throw e; - } finally { - hfReader.close(); - } - } - - @Override - public Map 
getAllApplications() - throws IOException { - Map historyDataMap = - new HashMap(); - FileStatus[] files = fs.listStatus(rootDirPath); - for (FileStatus file : files) { - ApplicationId appId = - ConverterUtils.toApplicationId(file.getPath().getName()); - try { - ApplicationHistoryData historyData = getApplication(appId); - if (historyData != null) { - historyDataMap.put(appId, historyData); - } - } catch (IOException e) { - // Eat the exception not to disturb the getting the next - // ApplicationHistoryData - LOG.error("History information of application " + appId - + " is not included into the result due to the exception", e); - } - } - return historyDataMap; - } - - @Override - public Map - getApplicationAttempts(ApplicationId appId) throws IOException { - Map historyDataMap = - new HashMap(); - HistoryFileReader hfReader = getHistoryFileReader(appId); - try { - while (hfReader.hasNext()) { - HistoryFileReader.Entry entry = hfReader.next(); - if (entry.key.id.startsWith( - ConverterUtils.APPLICATION_ATTEMPT_PREFIX)) { - ApplicationAttemptId appAttemptId = - ConverterUtils.toApplicationAttemptId(entry.key.id); - if (appAttemptId.getApplicationId().equals(appId)) { - ApplicationAttemptHistoryData historyData = - historyDataMap.get(appAttemptId); - if (historyData == null) { - historyData = ApplicationAttemptHistoryData.newInstance( - appAttemptId, null, -1, null, null, null, - FinalApplicationStatus.UNDEFINED, null); - historyDataMap.put(appAttemptId, historyData); - } - if (entry.key.suffix.equals(START_DATA_SUFFIX)) { - mergeApplicationAttemptHistoryData(historyData, - parseApplicationAttemptStartData(entry.value)); - } else if (entry.key.suffix.equals(FINISH_DATA_SUFFIX)) { - mergeApplicationAttemptHistoryData(historyData, - parseApplicationAttemptFinishData(entry.value)); - } - } - } - } - LOG.info("Completed reading history information of all application" - + " attempts of application " + appId); - } catch (IOException e) { - LOG.info("Error when reading history information of some application" - + " attempts of application " + appId); - } finally { - hfReader.close(); - } - return historyDataMap; - } - - @Override - public ApplicationAttemptHistoryData getApplicationAttempt( - ApplicationAttemptId appAttemptId) throws IOException { - HistoryFileReader hfReader = - getHistoryFileReader(appAttemptId.getApplicationId()); - try { - boolean readStartData = false; - boolean readFinishData = false; - ApplicationAttemptHistoryData historyData = - ApplicationAttemptHistoryData.newInstance(appAttemptId, null, -1, - null, null, null, FinalApplicationStatus.UNDEFINED, null); - while ((!readStartData || !readFinishData) && hfReader.hasNext()) { - HistoryFileReader.Entry entry = hfReader.next(); - if (entry.key.id.equals(appAttemptId.toString())) { - if (entry.key.suffix.equals(START_DATA_SUFFIX)) { - ApplicationAttemptStartData startData = - parseApplicationAttemptStartData(entry.value); - mergeApplicationAttemptHistoryData(historyData, startData); - readStartData = true; - } else if (entry.key.suffix.equals(FINISH_DATA_SUFFIX)) { - ApplicationAttemptFinishData finishData = - parseApplicationAttemptFinishData(entry.value); - mergeApplicationAttemptHistoryData(historyData, finishData); - readFinishData = true; - } - } - } - if (!readStartData && !readFinishData) { - return null; - } - if (!readStartData) { - LOG.warn("Start information is missing for application attempt " - + appAttemptId); - } - if (!readFinishData) { - LOG.warn("Finish information is missing for application attempt " - + 
appAttemptId); - } - LOG.info("Completed reading history information of application attempt " - + appAttemptId); - return historyData; - } catch (IOException e) { - LOG.error("Error when reading history file of application attempt" - + appAttemptId, e); - throw e; - } finally { - hfReader.close(); - } - } - - @Override - public ContainerHistoryData getContainer(ContainerId containerId) - throws IOException { - HistoryFileReader hfReader = - getHistoryFileReader(containerId.getApplicationAttemptId() - .getApplicationId()); - try { - boolean readStartData = false; - boolean readFinishData = false; - ContainerHistoryData historyData = - ContainerHistoryData - .newInstance(containerId, null, null, null, Long.MIN_VALUE, - Long.MAX_VALUE, null, Integer.MAX_VALUE, null); - while ((!readStartData || !readFinishData) && hfReader.hasNext()) { - HistoryFileReader.Entry entry = hfReader.next(); - if (entry.key.id.equals(containerId.toString())) { - if (entry.key.suffix.equals(START_DATA_SUFFIX)) { - ContainerStartData startData = parseContainerStartData(entry.value); - mergeContainerHistoryData(historyData, startData); - readStartData = true; - } else if (entry.key.suffix.equals(FINISH_DATA_SUFFIX)) { - ContainerFinishData finishData = - parseContainerFinishData(entry.value); - mergeContainerHistoryData(historyData, finishData); - readFinishData = true; - } - } - } - if (!readStartData && !readFinishData) { - return null; - } - if (!readStartData) { - LOG.warn("Start information is missing for container " + containerId); - } - if (!readFinishData) { - LOG.warn("Finish information is missing for container " + containerId); - } - LOG.info("Completed reading history information of container " - + containerId); - return historyData; - } catch (IOException e) { - LOG.error("Error when reading history file of container " + containerId, e); - throw e; - } finally { - hfReader.close(); - } - } - - @Override - public ContainerHistoryData getAMContainer(ApplicationAttemptId appAttemptId) - throws IOException { - ApplicationAttemptHistoryData attemptHistoryData = - getApplicationAttempt(appAttemptId); - if (attemptHistoryData == null - || attemptHistoryData.getMasterContainerId() == null) { - return null; - } - return getContainer(attemptHistoryData.getMasterContainerId()); - } - - @Override - public Map getContainers( - ApplicationAttemptId appAttemptId) throws IOException { - Map historyDataMap = - new HashMap(); - HistoryFileReader hfReader = - getHistoryFileReader(appAttemptId.getApplicationId()); - try { - while (hfReader.hasNext()) { - HistoryFileReader.Entry entry = hfReader.next(); - if (entry.key.id.startsWith(ConverterUtils.CONTAINER_PREFIX)) { - ContainerId containerId = - ConverterUtils.toContainerId(entry.key.id); - if (containerId.getApplicationAttemptId().equals(appAttemptId)) { - ContainerHistoryData historyData = - historyDataMap.get(containerId); - if (historyData == null) { - historyData = ContainerHistoryData.newInstance( - containerId, null, null, null, Long.MIN_VALUE, - Long.MAX_VALUE, null, Integer.MAX_VALUE, null); - historyDataMap.put(containerId, historyData); - } - if (entry.key.suffix.equals(START_DATA_SUFFIX)) { - mergeContainerHistoryData(historyData, - parseContainerStartData(entry.value)); - } else if (entry.key.suffix.equals(FINISH_DATA_SUFFIX)) { - mergeContainerHistoryData(historyData, - parseContainerFinishData(entry.value)); - } - } - } - } - LOG.info("Completed reading history information of all conatiners" - + " of application attempt " + appAttemptId); - } catch 
(IOException e) { - LOG.info("Error when reading history information of some containers" - + " of application attempt " + appAttemptId); - } finally { - hfReader.close(); - } - return historyDataMap; - } - - @Override - public void applicationStarted(ApplicationStartData appStart) - throws IOException { - HistoryFileWriter hfWriter = - outstandingWriters.get(appStart.getApplicationId()); - if (hfWriter == null) { - Path applicationHistoryFile = - new Path(rootDirPath, appStart.getApplicationId().toString()); - try { - hfWriter = new HistoryFileWriter(applicationHistoryFile); - LOG.info("Opened history file of application " - + appStart.getApplicationId()); - } catch (IOException e) { - LOG.error("Error when openning history file of application " - + appStart.getApplicationId(), e); - throw e; - } - outstandingWriters.put(appStart.getApplicationId(), hfWriter); - } else { - throw new IOException("History file of application " - + appStart.getApplicationId() + " is already opened"); - } - assert appStart instanceof ApplicationStartDataPBImpl; - try { - hfWriter.writeHistoryData(new HistoryDataKey(appStart.getApplicationId() - .toString(), START_DATA_SUFFIX), - ((ApplicationStartDataPBImpl) appStart).getProto().toByteArray()); - LOG.info("Start information of application " - + appStart.getApplicationId() + " is written"); - } catch (IOException e) { - LOG.error("Error when writing start information of application " - + appStart.getApplicationId(), e); - throw e; - } - } - - @Override - public void applicationFinished(ApplicationFinishData appFinish) - throws IOException { - HistoryFileWriter hfWriter = - getHistoryFileWriter(appFinish.getApplicationId()); - assert appFinish instanceof ApplicationFinishDataPBImpl; - try { - hfWriter.writeHistoryData(new HistoryDataKey(appFinish.getApplicationId() - .toString(), FINISH_DATA_SUFFIX), - ((ApplicationFinishDataPBImpl) appFinish).getProto().toByteArray()); - LOG.info("Finish information of application " - + appFinish.getApplicationId() + " is written"); - } catch (IOException e) { - LOG.error("Error when writing finish information of application " - + appFinish.getApplicationId(), e); - throw e; - } finally { - hfWriter.close(); - outstandingWriters.remove(appFinish.getApplicationId()); - } - } - - @Override - public void applicationAttemptStarted( - ApplicationAttemptStartData appAttemptStart) throws IOException { - HistoryFileWriter hfWriter = - getHistoryFileWriter(appAttemptStart.getApplicationAttemptId() - .getApplicationId()); - assert appAttemptStart instanceof ApplicationAttemptStartDataPBImpl; - try { - hfWriter.writeHistoryData(new HistoryDataKey(appAttemptStart - .getApplicationAttemptId().toString(), START_DATA_SUFFIX), - ((ApplicationAttemptStartDataPBImpl) appAttemptStart).getProto() - .toByteArray()); - LOG.info("Start information of application attempt " - + appAttemptStart.getApplicationAttemptId() + " is written"); - } catch (IOException e) { - LOG.error("Error when writing start information of application attempt " - + appAttemptStart.getApplicationAttemptId(), e); - throw e; - } - } - - @Override - public void applicationAttemptFinished( - ApplicationAttemptFinishData appAttemptFinish) throws IOException { - HistoryFileWriter hfWriter = - getHistoryFileWriter(appAttemptFinish.getApplicationAttemptId() - .getApplicationId()); - assert appAttemptFinish instanceof ApplicationAttemptFinishDataPBImpl; - try { - hfWriter.writeHistoryData(new HistoryDataKey(appAttemptFinish - .getApplicationAttemptId().toString(), FINISH_DATA_SUFFIX), 
- ((ApplicationAttemptFinishDataPBImpl) appAttemptFinish).getProto() - .toByteArray()); - LOG.info("Finish information of application attempt " - + appAttemptFinish.getApplicationAttemptId() + " is written"); - } catch (IOException e) { - LOG.error("Error when writing finish information of application attempt " - + appAttemptFinish.getApplicationAttemptId(), e); - throw e; - } - } - - @Override - public void containerStarted(ContainerStartData containerStart) - throws IOException { - HistoryFileWriter hfWriter = - getHistoryFileWriter(containerStart.getContainerId() - .getApplicationAttemptId().getApplicationId()); - assert containerStart instanceof ContainerStartDataPBImpl; - try { - hfWriter.writeHistoryData(new HistoryDataKey(containerStart - .getContainerId().toString(), START_DATA_SUFFIX), - ((ContainerStartDataPBImpl) containerStart).getProto().toByteArray()); - LOG.info("Start information of container " - + containerStart.getContainerId() + " is written"); - } catch (IOException e) { - LOG.error("Error when writing start information of container " - + containerStart.getContainerId(), e); - throw e; - } - } - - @Override - public void containerFinished(ContainerFinishData containerFinish) - throws IOException { - HistoryFileWriter hfWriter = - getHistoryFileWriter(containerFinish.getContainerId() - .getApplicationAttemptId().getApplicationId()); - assert containerFinish instanceof ContainerFinishDataPBImpl; - try { - hfWriter.writeHistoryData(new HistoryDataKey(containerFinish - .getContainerId().toString(), FINISH_DATA_SUFFIX), - ((ContainerFinishDataPBImpl) containerFinish).getProto().toByteArray()); - LOG.info("Finish information of container " - + containerFinish.getContainerId() + " is written"); - } catch (IOException e) { - LOG.error("Error when writing finish information of container " - + containerFinish.getContainerId(), e); - } - } - - private static ApplicationStartData parseApplicationStartData(byte[] value) - throws InvalidProtocolBufferException { - return new ApplicationStartDataPBImpl( - ApplicationStartDataProto.parseFrom(value)); - } - - private static ApplicationFinishData parseApplicationFinishData(byte[] value) - throws InvalidProtocolBufferException { - return new ApplicationFinishDataPBImpl( - ApplicationFinishDataProto.parseFrom(value)); - } - - private static ApplicationAttemptStartData parseApplicationAttemptStartData( - byte[] value) throws InvalidProtocolBufferException { - return new ApplicationAttemptStartDataPBImpl( - ApplicationAttemptStartDataProto.parseFrom(value)); - } - - private static ApplicationAttemptFinishData - parseApplicationAttemptFinishData(byte[] value) - throws InvalidProtocolBufferException { - return new ApplicationAttemptFinishDataPBImpl( - ApplicationAttemptFinishDataProto.parseFrom(value)); - } - - private static ContainerStartData parseContainerStartData(byte[] value) - throws InvalidProtocolBufferException { - return new ContainerStartDataPBImpl( - ContainerStartDataProto.parseFrom(value)); - } - - private static ContainerFinishData parseContainerFinishData(byte[] value) - throws InvalidProtocolBufferException { - return new ContainerFinishDataPBImpl( - ContainerFinishDataProto.parseFrom(value)); - } - - private static void mergeApplicationHistoryData( - ApplicationHistoryData historyData, ApplicationStartData startData) { - historyData.setApplicationName(startData.getApplicationName()); - historyData.setApplicationType(startData.getApplicationType()); - historyData.setQueue(startData.getQueue()); - 
historyData.setUser(startData.getUser()); - historyData.setSubmitTime(startData.getSubmitTime()); - historyData.setStartTime(startData.getStartTime()); - } - - private static void mergeApplicationHistoryData( - ApplicationHistoryData historyData, ApplicationFinishData finishData) { - historyData.setFinishTime(finishData.getFinishTime()); - historyData.setDiagnosticsInfo(finishData.getDiagnosticsInfo()); - historyData.setFinalApplicationStatus(finishData - .getFinalApplicationStatus()); - historyData.setYarnApplicationState(finishData.getYarnApplicationState()); - } - - private static void mergeApplicationAttemptHistoryData( - ApplicationAttemptHistoryData historyData, - ApplicationAttemptStartData startData) { - historyData.setHost(startData.getHost()); - historyData.setRPCPort(startData.getRPCPort()); - historyData.setMasterContainerId(startData.getMasterContainerId()); - } - - private static void mergeApplicationAttemptHistoryData( - ApplicationAttemptHistoryData historyData, - ApplicationAttemptFinishData finishData) { - historyData.setDiagnosticsInfo(finishData.getDiagnosticsInfo()); - historyData.setTrackingURL(finishData.getTrackingURL()); - historyData.setFinalApplicationStatus(finishData - .getFinalApplicationStatus()); - historyData.setYarnApplicationAttemptState(finishData - .getYarnApplicationAttemptState()); - } - - private static void mergeContainerHistoryData( - ContainerHistoryData historyData, ContainerStartData startData) { - historyData.setAllocatedResource(startData.getAllocatedResource()); - historyData.setAssignedNode(startData.getAssignedNode()); - historyData.setPriority(startData.getPriority()); - historyData.setStartTime(startData.getStartTime()); - } - - private static void mergeContainerHistoryData( - ContainerHistoryData historyData, ContainerFinishData finishData) { - historyData.setFinishTime(finishData.getFinishTime()); - historyData.setDiagnosticsInfo(finishData.getDiagnosticsInfo()); - historyData.setContainerExitStatus(finishData.getContainerExitStatus()); - historyData.setContainerState(finishData.getContainerState()); - } - - private HistoryFileWriter getHistoryFileWriter(ApplicationId appId) - throws IOException { - HistoryFileWriter hfWriter = outstandingWriters.get(appId); - if (hfWriter == null) { - throw new IOException("History file of application " + appId - + " is not opened"); - } - return hfWriter; - } - - private HistoryFileReader getHistoryFileReader(ApplicationId appId) - throws IOException { - Path applicationHistoryFile = new Path(rootDirPath, appId.toString()); - if (!fs.exists(applicationHistoryFile)) { - throw new IOException("History file for application " + appId - + " is not found"); - } - // The history file is still under writing - if (outstandingWriters.containsKey(appId)) { - throw new IOException("History file for application " + appId - + " is under writing"); - } - return new HistoryFileReader(applicationHistoryFile); - } - - private class HistoryFileReader { - - private class Entry { - - private HistoryDataKey key; - private byte[] value; - - public Entry(HistoryDataKey key, byte[] value) { - this.key = key; - this.value = value; - } - } - - private TFile.Reader reader; - private TFile.Reader.Scanner scanner; - FSDataInputStream fsdis; - - public HistoryFileReader(Path historyFile) throws IOException { - fsdis = fs.open(historyFile); - reader = - new TFile.Reader(fsdis, fs.getFileStatus(historyFile).getLen(), - getConfig()); - reset(); - } - - public boolean hasNext() { - return !scanner.atEnd(); - } - - public Entry next() 
throws IOException { - TFile.Reader.Scanner.Entry entry = scanner.entry(); - DataInputStream dis = entry.getKeyStream(); - HistoryDataKey key = new HistoryDataKey(); - key.readFields(dis); - dis = entry.getValueStream(); - byte[] value = new byte[entry.getValueLength()]; - dis.read(value); - scanner.advance(); - return new Entry(key, value); - } - - public void reset() throws IOException { - IOUtils.cleanup(LOG, scanner); - scanner = reader.createScanner(); - } - - public void close() { - IOUtils.cleanup(LOG, scanner, reader, fsdis); - } - - } - - private class HistoryFileWriter { - - private FSDataOutputStream fsdos; - private TFile.Writer writer; - - public HistoryFileWriter(Path historyFile) throws IOException { - if (fs.exists(historyFile)) { - fsdos = fs.append(historyFile); - } else { - fsdos = fs.create(historyFile); - } - fs.setPermission(historyFile, HISTORY_FILE_UMASK); - writer = - new TFile.Writer(fsdos, MIN_BLOCK_SIZE, getConfig().get( - YarnConfiguration.FS_APPLICATION_HISTORY_STORE_COMPRESSION_TYPE, - YarnConfiguration.DEFAULT_FS_APPLICATION_HISTORY_STORE_COMPRESSION_TYPE), null, - getConfig()); - } - - public synchronized void close() { - IOUtils.cleanup(LOG, writer, fsdos); - } - - public synchronized void writeHistoryData(HistoryDataKey key, byte[] value) - throws IOException { - DataOutputStream dos = null; - try { - dos = writer.prepareAppendKey(-1); - key.write(dos); - } finally { - IOUtils.cleanup(LOG, dos); - } - try { - dos = writer.prepareAppendValue(value.length); - dos.write(value); - } finally { - IOUtils.cleanup(LOG, dos); - } - } - - } - - private static class HistoryDataKey implements Writable { - - private String id; - - private String suffix; - - public HistoryDataKey() { - this(null, null); - } - - public HistoryDataKey(String id, String suffix) { - this.id = id; - this.suffix = suffix; - } - - @Override - public void write(DataOutput out) throws IOException { - out.writeUTF(id); - out.writeUTF(suffix); - } - - @Override - public void readFields(DataInput in) throws IOException { - id = in.readUTF(); - suffix = in.readUTF(); - } - } -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/MemoryApplicationHistoryStore.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/MemoryApplicationHistoryStore.java deleted file mode 100644 index c226ad3..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/MemoryApplicationHistoryStore.java +++ /dev/null @@ -1,274 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
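Every record in the per-application TFile is keyed by an entity id plus a start/finish suffix, and the HistoryDataKey above serializes that pair with writeUTF. A small self-contained round-trip, re-creating the key as a stand-alone Writable purely for illustration:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.DataOutput;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hadoop.io.Writable;

// Stand-alone re-creation of the HistoryDataKey idea: id + suffix written with
// writeUTF, so the pair can serve as a compact TFile key.
public class KeyRoundTrip {
  static class Key implements Writable {
    String id;
    String suffix;

    @Override
    public void write(DataOutput out) throws IOException {
      out.writeUTF(id);
      out.writeUTF(suffix);
    }

    @Override
    public void readFields(DataInput in) throws IOException {
      id = in.readUTF();
      suffix = in.readUTF();
    }
  }

  public static void main(String[] args) throws IOException {
    Key key = new Key();
    key.id = "application_1400000000000_0001";
    key.suffix = "_start";

    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    key.write(new DataOutputStream(bytes));

    Key copy = new Key();
    copy.readFields(new DataInputStream(
        new ByteArrayInputStream(bytes.toByteArray())));
    System.out.println(copy.id + copy.suffix); // application_1400000000000_0001_start
  }
}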
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.yarn.server.applicationhistoryservice; - -import java.io.IOException; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ConcurrentMap; - -import org.apache.hadoop.classification.InterfaceAudience.Private; -import org.apache.hadoop.classification.InterfaceStability.Unstable; -import org.apache.hadoop.service.AbstractService; -import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; -import org.apache.hadoop.yarn.api.records.ApplicationId; -import org.apache.hadoop.yarn.api.records.ContainerId; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptFinishData; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptHistoryData; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptStartData; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationFinishData; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationHistoryData; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationStartData; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerFinishData; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerHistoryData; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerStartData; - -/** - * In-memory implementation of {@link ApplicationHistoryStore}. This - * implementation is for test purpose only. If users improperly instantiate it, - * they may encounter reading and writing history data in different memory - * store. - * - */ -@Private -@Unstable -public class MemoryApplicationHistoryStore extends AbstractService implements - ApplicationHistoryStore { - - private final ConcurrentMap applicationData = - new ConcurrentHashMap(); - private final ConcurrentMap> applicationAttemptData = - new ConcurrentHashMap>(); - private final ConcurrentMap> containerData = - new ConcurrentHashMap>(); - - public MemoryApplicationHistoryStore() { - super(MemoryApplicationHistoryStore.class.getName()); - } - - @Override - public Map getAllApplications() { - return new HashMap(applicationData); - } - - @Override - public ApplicationHistoryData getApplication(ApplicationId appId) { - return applicationData.get(appId); - } - - @Override - public Map - getApplicationAttempts(ApplicationId appId) { - ConcurrentMap subMap = - applicationAttemptData.get(appId); - if (subMap == null) { - return Collections - . 
emptyMap(); - } else { - return new HashMap( - subMap); - } - } - - @Override - public ApplicationAttemptHistoryData getApplicationAttempt( - ApplicationAttemptId appAttemptId) { - ConcurrentMap subMap = - applicationAttemptData.get(appAttemptId.getApplicationId()); - if (subMap == null) { - return null; - } else { - return subMap.get(appAttemptId); - } - } - - @Override - public ContainerHistoryData getAMContainer(ApplicationAttemptId appAttemptId) { - ApplicationAttemptHistoryData appAttempt = - getApplicationAttempt(appAttemptId); - if (appAttempt == null || appAttempt.getMasterContainerId() == null) { - return null; - } else { - return getContainer(appAttempt.getMasterContainerId()); - } - } - - @Override - public ContainerHistoryData getContainer(ContainerId containerId) { - Map subMap = - containerData.get(containerId.getApplicationAttemptId()); - if (subMap == null) { - return null; - } else { - return subMap.get(containerId); - } - } - - @Override - public Map getContainers( - ApplicationAttemptId appAttemptId) throws IOException { - ConcurrentMap subMap = - containerData.get(appAttemptId); - if (subMap == null) { - return Collections. emptyMap(); - } else { - return new HashMap(subMap); - } - } - - @Override - public void applicationStarted(ApplicationStartData appStart) - throws IOException { - ApplicationHistoryData oldData = - applicationData.putIfAbsent(appStart.getApplicationId(), - ApplicationHistoryData.newInstance(appStart.getApplicationId(), - appStart.getApplicationName(), appStart.getApplicationType(), - appStart.getQueue(), appStart.getUser(), appStart.getSubmitTime(), - appStart.getStartTime(), Long.MAX_VALUE, null, null, null)); - if (oldData != null) { - throw new IOException("The start information of application " - + appStart.getApplicationId() + " is already stored."); - } - } - - @Override - public void applicationFinished(ApplicationFinishData appFinish) - throws IOException { - ApplicationHistoryData data = - applicationData.get(appFinish.getApplicationId()); - if (data == null) { - throw new IOException("The finish information of application " - + appFinish.getApplicationId() + " is stored before the start" - + " information."); - } - // Make the assumption that YarnApplicationState should not be null if - // the finish information is already recorded - if (data.getYarnApplicationState() != null) { - throw new IOException("The finish information of application " - + appFinish.getApplicationId() + " is already stored."); - } - data.setFinishTime(appFinish.getFinishTime()); - data.setDiagnosticsInfo(appFinish.getDiagnosticsInfo()); - data.setFinalApplicationStatus(appFinish.getFinalApplicationStatus()); - data.setYarnApplicationState(appFinish.getYarnApplicationState()); - } - - @Override - public void applicationAttemptStarted( - ApplicationAttemptStartData appAttemptStart) throws IOException { - ConcurrentMap subMap = - getSubMap(appAttemptStart.getApplicationAttemptId().getApplicationId()); - ApplicationAttemptHistoryData oldData = - subMap.putIfAbsent(appAttemptStart.getApplicationAttemptId(), - ApplicationAttemptHistoryData.newInstance( - appAttemptStart.getApplicationAttemptId(), - appAttemptStart.getHost(), appAttemptStart.getRPCPort(), - appAttemptStart.getMasterContainerId(), null, null, null, null)); - if (oldData != null) { - throw new IOException("The start information of application attempt " - + appAttemptStart.getApplicationAttemptId() + " is already stored."); - } - } - - @Override - public void applicationAttemptFinished( - 
ApplicationAttemptFinishData appAttemptFinish) throws IOException { - ConcurrentMap subMap = - getSubMap(appAttemptFinish.getApplicationAttemptId().getApplicationId()); - ApplicationAttemptHistoryData data = - subMap.get(appAttemptFinish.getApplicationAttemptId()); - if (data == null) { - throw new IOException("The finish information of application attempt " - + appAttemptFinish.getApplicationAttemptId() + " is stored before" - + " the start information."); - } - // Make the assumption that YarnApplicationAttemptState should not be null - // if the finish information is already recorded - if (data.getYarnApplicationAttemptState() != null) { - throw new IOException("The finish information of application attempt " - + appAttemptFinish.getApplicationAttemptId() + " is already stored."); - } - data.setTrackingURL(appAttemptFinish.getTrackingURL()); - data.setDiagnosticsInfo(appAttemptFinish.getDiagnosticsInfo()); - data - .setFinalApplicationStatus(appAttemptFinish.getFinalApplicationStatus()); - data.setYarnApplicationAttemptState(appAttemptFinish - .getYarnApplicationAttemptState()); - } - - private ConcurrentMap - getSubMap(ApplicationId appId) { - applicationAttemptData - .putIfAbsent( - appId, - new ConcurrentHashMap()); - return applicationAttemptData.get(appId); - } - - @Override - public void containerStarted(ContainerStartData containerStart) - throws IOException { - ConcurrentMap subMap = - getSubMap(containerStart.getContainerId().getApplicationAttemptId()); - ContainerHistoryData oldData = - subMap.putIfAbsent(containerStart.getContainerId(), - ContainerHistoryData.newInstance(containerStart.getContainerId(), - containerStart.getAllocatedResource(), - containerStart.getAssignedNode(), containerStart.getPriority(), - containerStart.getStartTime(), Long.MAX_VALUE, null, - Integer.MAX_VALUE, null)); - if (oldData != null) { - throw new IOException("The start information of container " - + containerStart.getContainerId() + " is already stored."); - } - } - - @Override - public void containerFinished(ContainerFinishData containerFinish) - throws IOException { - ConcurrentMap subMap = - getSubMap(containerFinish.getContainerId().getApplicationAttemptId()); - ContainerHistoryData data = subMap.get(containerFinish.getContainerId()); - if (data == null) { - throw new IOException("The finish information of container " - + containerFinish.getContainerId() + " is stored before" - + " the start information."); - } - // Make the assumption that ContainerState should not be null if - // the finish information is already recorded - if (data.getContainerState() != null) { - throw new IOException("The finish information of container " - + containerFinish.getContainerId() + " is already stored."); - } - data.setFinishTime(containerFinish.getFinishTime()); - data.setDiagnosticsInfo(containerFinish.getDiagnosticsInfo()); - data.setContainerExitStatus(containerFinish.getContainerExitStatus()); - data.setContainerState(containerFinish.getContainerState()); - } - - private ConcurrentMap getSubMap( - ApplicationAttemptId appAttemptId) { - containerData.putIfAbsent(appAttemptId, - new ConcurrentHashMap()); - return containerData.get(appAttemptId); - } - -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/NullApplicationHistoryStore.java 
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/NullApplicationHistoryStore.java deleted file mode 100644 index 3660c10..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/NullApplicationHistoryStore.java +++ /dev/null @@ -1,127 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.yarn.server.applicationhistoryservice; - -import java.io.IOException; -import java.util.Collections; -import java.util.Map; - -import org.apache.hadoop.classification.InterfaceAudience.Private; -import org.apache.hadoop.classification.InterfaceStability.Unstable; -import org.apache.hadoop.service.AbstractService; -import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; -import org.apache.hadoop.yarn.api.records.ApplicationId; -import org.apache.hadoop.yarn.api.records.ContainerId; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptFinishData; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptHistoryData; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptStartData; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationFinishData; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationHistoryData; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationStartData; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerFinishData; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerHistoryData; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerStartData; - -/** - * Dummy implementation of {@link ApplicationHistoryStore}. If this - * implementation is used, no history data will be persisted. 
- * - */ -@Unstable -@Private -public class NullApplicationHistoryStore extends AbstractService implements - ApplicationHistoryStore { - - public NullApplicationHistoryStore() { - super(NullApplicationHistoryStore.class.getName()); - } - - @Override - public void applicationStarted(ApplicationStartData appStart) - throws IOException { - } - - @Override - public void applicationFinished(ApplicationFinishData appFinish) - throws IOException { - } - - @Override - public void applicationAttemptStarted( - ApplicationAttemptStartData appAttemptStart) throws IOException { - } - - @Override - public void applicationAttemptFinished( - ApplicationAttemptFinishData appAttemptFinish) throws IOException { - } - - @Override - public void containerStarted(ContainerStartData containerStart) - throws IOException { - } - - @Override - public void containerFinished(ContainerFinishData containerFinish) - throws IOException { - } - - @Override - public ApplicationHistoryData getApplication(ApplicationId appId) - throws IOException { - return null; - } - - @Override - public Map getAllApplications() - throws IOException { - return Collections.emptyMap(); - } - - @Override - public Map - getApplicationAttempts(ApplicationId appId) throws IOException { - return Collections.emptyMap(); - } - - @Override - public ApplicationAttemptHistoryData getApplicationAttempt( - ApplicationAttemptId appAttemptId) throws IOException { - return null; - } - - @Override - public ContainerHistoryData getContainer(ContainerId containerId) - throws IOException { - return null; - } - - @Override - public ContainerHistoryData getAMContainer(ApplicationAttemptId appAttemptId) - throws IOException { - return null; - } - - @Override - public Map getContainers( - ApplicationAttemptId appAttemptId) throws IOException { - return Collections.emptyMap(); - } - -} \ No newline at end of file diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/records/ApplicationAttemptFinishData.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/records/ApplicationAttemptFinishData.java deleted file mode 100644 index 7ba51af..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/records/ApplicationAttemptFinishData.java +++ /dev/null @@ -1,95 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.hadoop.yarn.server.applicationhistoryservice.records; - -import org.apache.hadoop.classification.InterfaceAudience.Public; -import org.apache.hadoop.classification.InterfaceStability.Unstable; -import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; -import org.apache.hadoop.yarn.api.records.FinalApplicationStatus; -import org.apache.hadoop.yarn.api.records.YarnApplicationAttemptState; -import org.apache.hadoop.yarn.util.Records; - -/** - * The class contains the fields that can be determined when - * RMAppAttempt finishes, and that need to be stored persistently. - */ -@Public -@Unstable -public abstract class ApplicationAttemptFinishData { - - @Public - @Unstable - public static ApplicationAttemptFinishData newInstance( - ApplicationAttemptId appAttemptId, String diagnosticsInfo, - String trackingURL, FinalApplicationStatus finalApplicationStatus, - YarnApplicationAttemptState yarnApplicationAttemptState) { - ApplicationAttemptFinishData appAttemptFD = - Records.newRecord(ApplicationAttemptFinishData.class); - appAttemptFD.setApplicationAttemptId(appAttemptId); - appAttemptFD.setDiagnosticsInfo(diagnosticsInfo); - appAttemptFD.setTrackingURL(trackingURL); - appAttemptFD.setFinalApplicationStatus(finalApplicationStatus); - appAttemptFD.setYarnApplicationAttemptState(yarnApplicationAttemptState); - return appAttemptFD; - } - - @Public - @Unstable - public abstract ApplicationAttemptId getApplicationAttemptId(); - - @Public - @Unstable - public abstract void setApplicationAttemptId( - ApplicationAttemptId applicationAttemptId); - - @Public - @Unstable - public abstract String getTrackingURL(); - - @Public - @Unstable - public abstract void setTrackingURL(String trackingURL); - - @Public - @Unstable - public abstract String getDiagnosticsInfo(); - - @Public - @Unstable - public abstract void setDiagnosticsInfo(String diagnosticsInfo); - - @Public - @Unstable - public abstract FinalApplicationStatus getFinalApplicationStatus(); - - @Public - @Unstable - public abstract void setFinalApplicationStatus( - FinalApplicationStatus finalApplicationStatus); - - @Public - @Unstable - public abstract YarnApplicationAttemptState getYarnApplicationAttemptState(); - - @Public - @Unstable - public abstract void setYarnApplicationAttemptState( - YarnApplicationAttemptState yarnApplicationAttemptState); - -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/records/ApplicationAttemptHistoryData.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/records/ApplicationAttemptHistoryData.java deleted file mode 100644 index b759ab1..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/records/ApplicationAttemptHistoryData.java +++ /dev/null @@ -1,171 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.yarn.server.applicationhistoryservice.records; - -import org.apache.hadoop.classification.InterfaceAudience.Public; -import org.apache.hadoop.classification.InterfaceStability.Unstable; -import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; -import org.apache.hadoop.yarn.api.records.ContainerId; -import org.apache.hadoop.yarn.api.records.FinalApplicationStatus; -import org.apache.hadoop.yarn.api.records.YarnApplicationAttemptState; - -/** - * The class contains all the fields that are stored persistently for - * RMAppAttempt. - */ -@Public -@Unstable -public class ApplicationAttemptHistoryData { - - private ApplicationAttemptId applicationAttemptId; - - private String host; - - private int rpcPort; - - private String trackingURL; - - private String diagnosticsInfo; - - private FinalApplicationStatus finalApplicationStatus; - - private ContainerId masterContainerId; - - private YarnApplicationAttemptState yarnApplicationAttemptState; - - @Public - @Unstable - public static ApplicationAttemptHistoryData newInstance( - ApplicationAttemptId appAttemptId, String host, int rpcPort, - ContainerId masterContainerId, String diagnosticsInfo, - String trackingURL, FinalApplicationStatus finalApplicationStatus, - YarnApplicationAttemptState yarnApplicationAttemptState) { - ApplicationAttemptHistoryData appAttemptHD = - new ApplicationAttemptHistoryData(); - appAttemptHD.setApplicationAttemptId(appAttemptId); - appAttemptHD.setHost(host); - appAttemptHD.setRPCPort(rpcPort); - appAttemptHD.setMasterContainerId(masterContainerId); - appAttemptHD.setDiagnosticsInfo(diagnosticsInfo); - appAttemptHD.setTrackingURL(trackingURL); - appAttemptHD.setFinalApplicationStatus(finalApplicationStatus); - appAttemptHD.setYarnApplicationAttemptState(yarnApplicationAttemptState); - return appAttemptHD; - } - - @Public - @Unstable - public ApplicationAttemptId getApplicationAttemptId() { - return applicationAttemptId; - } - - @Public - @Unstable - public void - setApplicationAttemptId(ApplicationAttemptId applicationAttemptId) { - this.applicationAttemptId = applicationAttemptId; - } - - @Public - @Unstable - public String getHost() { - return host; - } - - @Public - @Unstable - public void setHost(String host) { - this.host = host; - } - - @Public - @Unstable - public int getRPCPort() { - return rpcPort; - } - - @Public - @Unstable - public void setRPCPort(int rpcPort) { - this.rpcPort = rpcPort; - } - - @Public - @Unstable - public String getTrackingURL() { - return trackingURL; - } - - @Public - @Unstable - public void setTrackingURL(String trackingURL) { - this.trackingURL = trackingURL; - } - - @Public - @Unstable - public String getDiagnosticsInfo() { - return diagnosticsInfo; - } - - @Public - @Unstable - public void setDiagnosticsInfo(String diagnosticsInfo) { - this.diagnosticsInfo = diagnosticsInfo; - } - - @Public - @Unstable - public FinalApplicationStatus getFinalApplicationStatus() { - return finalApplicationStatus; - } - - @Public - @Unstable - public void setFinalApplicationStatus( - FinalApplicationStatus finalApplicationStatus) { - 
this.finalApplicationStatus = finalApplicationStatus; - } - - @Public - @Unstable - public ContainerId getMasterContainerId() { - return masterContainerId; - } - - @Public - @Unstable - public void setMasterContainerId(ContainerId masterContainerId) { - this.masterContainerId = masterContainerId; - } - - @Public - @Unstable - public YarnApplicationAttemptState getYarnApplicationAttemptState() { - return yarnApplicationAttemptState; - } - - @Public - @Unstable - public void setYarnApplicationAttemptState( - YarnApplicationAttemptState yarnApplicationAttemptState) { - this.yarnApplicationAttemptState = yarnApplicationAttemptState; - } - -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/records/ApplicationAttemptStartData.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/records/ApplicationAttemptStartData.java deleted file mode 100644 index 7ca43fa..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/records/ApplicationAttemptStartData.java +++ /dev/null @@ -1,82 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.yarn.server.applicationhistoryservice.records; - -import org.apache.hadoop.classification.InterfaceAudience.Public; -import org.apache.hadoop.classification.InterfaceStability.Unstable; -import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; -import org.apache.hadoop.yarn.api.records.ContainerId; -import org.apache.hadoop.yarn.util.Records; - -/** - * The class contains the fields that can be determined when - * RMAppAttempt starts, and that need to be stored persistently. 
- */ -@Public -@Unstable -public abstract class ApplicationAttemptStartData { - - @Public - @Unstable - public static ApplicationAttemptStartData newInstance( - ApplicationAttemptId appAttemptId, String host, int rpcPort, - ContainerId masterContainerId) { - ApplicationAttemptStartData appAttemptSD = - Records.newRecord(ApplicationAttemptStartData.class); - appAttemptSD.setApplicationAttemptId(appAttemptId); - appAttemptSD.setHost(host); - appAttemptSD.setRPCPort(rpcPort); - appAttemptSD.setMasterContainerId(masterContainerId); - return appAttemptSD; - } - - @Public - @Unstable - public abstract ApplicationAttemptId getApplicationAttemptId(); - - @Public - @Unstable - public abstract void setApplicationAttemptId( - ApplicationAttemptId applicationAttemptId); - - @Public - @Unstable - public abstract String getHost(); - - @Public - @Unstable - public abstract void setHost(String host); - - @Public - @Unstable - public abstract int getRPCPort(); - - @Public - @Unstable - public abstract void setRPCPort(int rpcPort); - - @Public - @Unstable - public abstract ContainerId getMasterContainerId(); - - @Public - @Unstable - public abstract void setMasterContainerId(ContainerId masterContainerId); - -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/records/ApplicationFinishData.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/records/ApplicationFinishData.java deleted file mode 100644 index 997fa6c..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/records/ApplicationFinishData.java +++ /dev/null @@ -1,94 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.yarn.server.applicationhistoryservice.records; - -import org.apache.hadoop.classification.InterfaceAudience.Public; -import org.apache.hadoop.classification.InterfaceStability.Unstable; -import org.apache.hadoop.yarn.api.records.ApplicationId; -import org.apache.hadoop.yarn.api.records.FinalApplicationStatus; -import org.apache.hadoop.yarn.api.records.YarnApplicationState; -import org.apache.hadoop.yarn.util.Records; - -/** - * The class contains the fields that can be determined when RMApp - * finishes, and that need to be stored persistently. 
- */ -@Public -@Unstable -public abstract class ApplicationFinishData { - - @Public - @Unstable - public static ApplicationFinishData newInstance(ApplicationId applicationId, - long finishTime, String diagnosticsInfo, - FinalApplicationStatus finalApplicationStatus, - YarnApplicationState yarnApplicationState) { - ApplicationFinishData appFD = - Records.newRecord(ApplicationFinishData.class); - appFD.setApplicationId(applicationId); - appFD.setFinishTime(finishTime); - appFD.setDiagnosticsInfo(diagnosticsInfo); - appFD.setFinalApplicationStatus(finalApplicationStatus); - appFD.setYarnApplicationState(yarnApplicationState); - return appFD; - } - - @Public - @Unstable - public abstract ApplicationId getApplicationId(); - - @Public - @Unstable - public abstract void setApplicationId(ApplicationId applicationId); - - @Public - @Unstable - public abstract long getFinishTime(); - - @Public - @Unstable - public abstract void setFinishTime(long finishTime); - - @Public - @Unstable - public abstract String getDiagnosticsInfo(); - - @Public - @Unstable - public abstract void setDiagnosticsInfo(String diagnosticsInfo); - - @Public - @Unstable - public abstract FinalApplicationStatus getFinalApplicationStatus(); - - @Public - @Unstable - public abstract void setFinalApplicationStatus( - FinalApplicationStatus finalApplicationStatus); - - @Public - @Unstable - public abstract YarnApplicationState getYarnApplicationState(); - - @Public - @Unstable - public abstract void setYarnApplicationState( - YarnApplicationState yarnApplicationState); - -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/records/ApplicationHistoryData.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/records/ApplicationHistoryData.java deleted file mode 100644 index b7d16f3..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/records/ApplicationHistoryData.java +++ /dev/null @@ -1,213 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.hadoop.yarn.server.applicationhistoryservice.records; - -import org.apache.hadoop.classification.InterfaceAudience.Public; -import org.apache.hadoop.classification.InterfaceStability.Unstable; -import org.apache.hadoop.yarn.api.records.ApplicationId; -import org.apache.hadoop.yarn.api.records.FinalApplicationStatus; -import org.apache.hadoop.yarn.api.records.YarnApplicationState; - -/** - * The class contains all the fields that are stored persistently for - * RMApp. - */ -@Public -@Unstable -public class ApplicationHistoryData { - - private ApplicationId applicationId; - - private String applicationName; - - private String applicationType; - - private String user; - - private String queue; - - private long submitTime; - - private long startTime; - - private long finishTime; - - private String diagnosticsInfo; - - private FinalApplicationStatus finalApplicationStatus; - - private YarnApplicationState yarnApplicationState; - - @Public - @Unstable - public static ApplicationHistoryData newInstance(ApplicationId applicationId, - String applicationName, String applicationType, String queue, - String user, long submitTime, long startTime, long finishTime, - String diagnosticsInfo, FinalApplicationStatus finalApplicationStatus, - YarnApplicationState yarnApplicationState) { - ApplicationHistoryData appHD = new ApplicationHistoryData(); - appHD.setApplicationId(applicationId); - appHD.setApplicationName(applicationName); - appHD.setApplicationType(applicationType); - appHD.setQueue(queue); - appHD.setUser(user); - appHD.setSubmitTime(submitTime); - appHD.setStartTime(startTime); - appHD.setFinishTime(finishTime); - appHD.setDiagnosticsInfo(diagnosticsInfo); - appHD.setFinalApplicationStatus(finalApplicationStatus); - appHD.setYarnApplicationState(yarnApplicationState); - return appHD; - } - - @Public - @Unstable - public ApplicationId getApplicationId() { - return applicationId; - } - - @Public - @Unstable - public void setApplicationId(ApplicationId applicationId) { - this.applicationId = applicationId; - } - - @Public - @Unstable - public String getApplicationName() { - return applicationName; - } - - @Public - @Unstable - public void setApplicationName(String applicationName) { - this.applicationName = applicationName; - } - - @Public - @Unstable - public String getApplicationType() { - return applicationType; - } - - @Public - @Unstable - public void setApplicationType(String applicationType) { - this.applicationType = applicationType; - } - - @Public - @Unstable - public String getUser() { - return user; - } - - @Public - @Unstable - public void setUser(String user) { - this.user = user; - } - - @Public - @Unstable - public String getQueue() { - return queue; - } - - @Public - @Unstable - public void setQueue(String queue) { - this.queue = queue; - } - - @Public - @Unstable - public long getSubmitTime() { - return submitTime; - } - - @Public - @Unstable - public void setSubmitTime(long submitTime) { - this.submitTime = submitTime; - } - - @Public - @Unstable - public long getStartTime() { - return startTime; - } - - @Public - @Unstable - public void setStartTime(long startTime) { - this.startTime = startTime; - } - - @Public - @Unstable - public long getFinishTime() { - return finishTime; - } - - @Public - @Unstable - public void setFinishTime(long finishTime) { - this.finishTime = finishTime; - } - - @Public - @Unstable - public String getDiagnosticsInfo() { - return diagnosticsInfo; - } - - @Public - @Unstable - public void setDiagnosticsInfo(String 
diagnosticsInfo) { - this.diagnosticsInfo = diagnosticsInfo; - } - - @Public - @Unstable - public FinalApplicationStatus getFinalApplicationStatus() { - return finalApplicationStatus; - } - - @Public - @Unstable - public void setFinalApplicationStatus( - FinalApplicationStatus finalApplicationStatus) { - this.finalApplicationStatus = finalApplicationStatus; - } - - @Public - @Unstable - public YarnApplicationState getYarnApplicationState() { - return this.yarnApplicationState; - } - - @Public - @Unstable - public void - setYarnApplicationState(YarnApplicationState yarnApplicationState) { - this.yarnApplicationState = yarnApplicationState; - } - -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/records/ApplicationStartData.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/records/ApplicationStartData.java deleted file mode 100644 index 6bc1323..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/records/ApplicationStartData.java +++ /dev/null @@ -1,106 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.yarn.server.applicationhistoryservice.records; - -import org.apache.hadoop.classification.InterfaceAudience.Public; -import org.apache.hadoop.classification.InterfaceStability.Unstable; -import org.apache.hadoop.yarn.api.records.ApplicationId; -import org.apache.hadoop.yarn.util.Records; - -/** - * The class contains the fields that can be determined when RMApp - * starts, and that need to be stored persistently. 
- */ -@Public -@Unstable -public abstract class ApplicationStartData { - - @Public - @Unstable - public static ApplicationStartData newInstance(ApplicationId applicationId, - String applicationName, String applicationType, String queue, - String user, long submitTime, long startTime) { - ApplicationStartData appSD = Records.newRecord(ApplicationStartData.class); - appSD.setApplicationId(applicationId); - appSD.setApplicationName(applicationName); - appSD.setApplicationType(applicationType); - appSD.setQueue(queue); - appSD.setUser(user); - appSD.setSubmitTime(submitTime); - appSD.setStartTime(startTime); - return appSD; - } - - @Public - @Unstable - public abstract ApplicationId getApplicationId(); - - @Public - @Unstable - public abstract void setApplicationId(ApplicationId applicationId); - - @Public - @Unstable - public abstract String getApplicationName(); - - @Public - @Unstable - public abstract void setApplicationName(String applicationName); - - @Public - @Unstable - public abstract String getApplicationType(); - - @Public - @Unstable - public abstract void setApplicationType(String applicationType); - - @Public - @Unstable - public abstract String getUser(); - - @Public - @Unstable - public abstract void setUser(String user); - - @Public - @Unstable - public abstract String getQueue(); - - @Public - @Unstable - public abstract void setQueue(String queue); - - @Public - @Unstable - public abstract long getSubmitTime(); - - @Public - @Unstable - public abstract void setSubmitTime(long submitTime); - - @Public - @Unstable - public abstract long getStartTime(); - - @Public - @Unstable - public abstract void setStartTime(long startTime); - -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/records/ContainerFinishData.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/records/ContainerFinishData.java deleted file mode 100644 index 5eb9ddb..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/records/ContainerFinishData.java +++ /dev/null @@ -1,90 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.hadoop.yarn.server.applicationhistoryservice.records; - -import org.apache.hadoop.classification.InterfaceAudience.Public; -import org.apache.hadoop.classification.InterfaceStability.Unstable; -import org.apache.hadoop.yarn.api.records.ContainerId; -import org.apache.hadoop.yarn.api.records.ContainerState; -import org.apache.hadoop.yarn.util.Records; - -/** - * The class contains the fields that can be determined when - * RMContainer finishes, and that need to be stored persistently. - */ -@Public -@Unstable -public abstract class ContainerFinishData { - - @Public - @Unstable - public static ContainerFinishData newInstance(ContainerId containerId, - long finishTime, String diagnosticsInfo, int containerExitCode, - ContainerState containerState) { - ContainerFinishData containerFD = - Records.newRecord(ContainerFinishData.class); - containerFD.setContainerId(containerId); - containerFD.setFinishTime(finishTime); - containerFD.setDiagnosticsInfo(diagnosticsInfo); - containerFD.setContainerExitStatus(containerExitCode); - containerFD.setContainerState(containerState); - return containerFD; - } - - @Public - @Unstable - public abstract ContainerId getContainerId(); - - @Public - @Unstable - public abstract void setContainerId(ContainerId containerId); - - @Public - @Unstable - public abstract long getFinishTime(); - - @Public - @Unstable - public abstract void setFinishTime(long finishTime); - - @Public - @Unstable - public abstract String getDiagnosticsInfo(); - - @Public - @Unstable - public abstract void setDiagnosticsInfo(String diagnosticsInfo); - - @Public - @Unstable - public abstract int getContainerExitStatus(); - - @Public - @Unstable - public abstract void setContainerExitStatus(int containerExitStatus); - - @Public - @Unstable - public abstract ContainerState getContainerState(); - - @Public - @Unstable - public abstract void setContainerState(ContainerState containerState); - -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/records/ContainerHistoryData.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/records/ContainerHistoryData.java deleted file mode 100644 index e606185..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/records/ContainerHistoryData.java +++ /dev/null @@ -1,182 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.hadoop.yarn.server.applicationhistoryservice.records; - -import org.apache.hadoop.classification.InterfaceAudience.Public; -import org.apache.hadoop.classification.InterfaceStability.Unstable; -import org.apache.hadoop.yarn.api.records.ContainerId; -import org.apache.hadoop.yarn.api.records.ContainerState; -import org.apache.hadoop.yarn.api.records.NodeId; -import org.apache.hadoop.yarn.api.records.Priority; -import org.apache.hadoop.yarn.api.records.Resource; - -/** - * The class contains all the fields that are stored persistently for - * RMContainer. - */ -@Public -@Unstable -public class ContainerHistoryData { - - private ContainerId containerId; - - private Resource allocatedResource; - - private NodeId assignedNode; - - private Priority priority; - - private long startTime; - - private long finishTime; - - private String diagnosticsInfo; - - private int containerExitStatus; - - private ContainerState containerState; - - @Public - @Unstable - public static ContainerHistoryData newInstance(ContainerId containerId, - Resource allocatedResource, NodeId assignedNode, Priority priority, - long startTime, long finishTime, String diagnosticsInfo, - int containerExitCode, ContainerState containerState) { - ContainerHistoryData containerHD = new ContainerHistoryData(); - containerHD.setContainerId(containerId); - containerHD.setAllocatedResource(allocatedResource); - containerHD.setAssignedNode(assignedNode); - containerHD.setPriority(priority); - containerHD.setStartTime(startTime); - containerHD.setFinishTime(finishTime); - containerHD.setDiagnosticsInfo(diagnosticsInfo); - containerHD.setContainerExitStatus(containerExitCode); - containerHD.setContainerState(containerState); - return containerHD; - } - - @Public - @Unstable - public ContainerId getContainerId() { - return containerId; - } - - @Public - @Unstable - public void setContainerId(ContainerId containerId) { - this.containerId = containerId; - } - - @Public - @Unstable - public Resource getAllocatedResource() { - return allocatedResource; - } - - @Public - @Unstable - public void setAllocatedResource(Resource resource) { - this.allocatedResource = resource; - } - - @Public - @Unstable - public NodeId getAssignedNode() { - return assignedNode; - } - - @Public - @Unstable - public void setAssignedNode(NodeId nodeId) { - this.assignedNode = nodeId; - } - - @Public - @Unstable - public Priority getPriority() { - return priority; - } - - @Public - @Unstable - public void setPriority(Priority priority) { - this.priority = priority; - } - - @Public - @Unstable - public long getStartTime() { - return startTime; - } - - @Public - @Unstable - public void setStartTime(long startTime) { - this.startTime = startTime; - } - - @Public - @Unstable - public long getFinishTime() { - return finishTime; - } - - @Public - @Unstable - public void setFinishTime(long finishTime) { - this.finishTime = finishTime; - } - - @Public - @Unstable - public String getDiagnosticsInfo() { - return diagnosticsInfo; - } - - @Public - @Unstable - public void setDiagnosticsInfo(String diagnosticsInfo) { - this.diagnosticsInfo = diagnosticsInfo; - } - - @Public - @Unstable - public int getContainerExitStatus() { - return containerExitStatus; - } - - @Public - @Unstable - public void setContainerExitStatus(int containerExitStatus) { - this.containerExitStatus = containerExitStatus; - } - - @Public - @Unstable - public ContainerState getContainerState() { - return containerState; - } - - @Public - @Unstable - public void 
setContainerState(ContainerState containerState) { - this.containerState = containerState; - } - -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/records/ContainerStartData.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/records/ContainerStartData.java deleted file mode 100644 index 0c6dd81..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/records/ContainerStartData.java +++ /dev/null @@ -1,92 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.yarn.server.applicationhistoryservice.records; - -import org.apache.hadoop.classification.InterfaceAudience.Public; -import org.apache.hadoop.classification.InterfaceStability.Unstable; -import org.apache.hadoop.yarn.api.records.ContainerId; -import org.apache.hadoop.yarn.api.records.NodeId; -import org.apache.hadoop.yarn.api.records.Priority; -import org.apache.hadoop.yarn.api.records.Resource; -import org.apache.hadoop.yarn.util.Records; - -/** - * The class contains the fields that can be determined when - * RMContainer starts, and that need to be stored persistently. 
- */ -@Public -@Unstable -public abstract class ContainerStartData { - - @Public - @Unstable - public static ContainerStartData newInstance(ContainerId containerId, - Resource allocatedResource, NodeId assignedNode, Priority priority, - long startTime) { - ContainerStartData containerSD = - Records.newRecord(ContainerStartData.class); - containerSD.setContainerId(containerId); - containerSD.setAllocatedResource(allocatedResource); - containerSD.setAssignedNode(assignedNode); - containerSD.setPriority(priority); - containerSD.setStartTime(startTime); - return containerSD; - } - - @Public - @Unstable - public abstract ContainerId getContainerId(); - - @Public - @Unstable - public abstract void setContainerId(ContainerId containerId); - - @Public - @Unstable - public abstract Resource getAllocatedResource(); - - @Public - @Unstable - public abstract void setAllocatedResource(Resource resource); - - @Public - @Unstable - public abstract NodeId getAssignedNode(); - - @Public - @Unstable - public abstract void setAssignedNode(NodeId nodeId); - - @Public - @Unstable - public abstract Priority getPriority(); - - @Public - @Unstable - public abstract void setPriority(Priority priority); - - @Public - @Unstable - public abstract long getStartTime(); - - @Public - @Unstable - public abstract void setStartTime(long startTime); - -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/records/impl/pb/ApplicationAttemptFinishDataPBImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/records/impl/pb/ApplicationAttemptFinishDataPBImpl.java deleted file mode 100644 index 945c12f..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/records/impl/pb/ApplicationAttemptFinishDataPBImpl.java +++ /dev/null @@ -1,239 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.hadoop.yarn.server.applicationhistoryservice.records.impl.pb; - -import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; -import org.apache.hadoop.yarn.api.records.FinalApplicationStatus; -import org.apache.hadoop.yarn.api.records.YarnApplicationAttemptState; -import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationAttemptIdPBImpl; -import org.apache.hadoop.yarn.api.records.impl.pb.ProtoUtils; -import org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptFinishDataProto; -import org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptFinishDataProtoOrBuilder; -import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto; -import org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto; -import org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationAttemptStateProto; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptFinishData; - -import com.google.protobuf.TextFormat; - -public class ApplicationAttemptFinishDataPBImpl extends - ApplicationAttemptFinishData { - - ApplicationAttemptFinishDataProto proto = ApplicationAttemptFinishDataProto - .getDefaultInstance(); - ApplicationAttemptFinishDataProto.Builder builder = null; - boolean viaProto = false; - - public ApplicationAttemptFinishDataPBImpl() { - builder = ApplicationAttemptFinishDataProto.newBuilder(); - } - - public ApplicationAttemptFinishDataPBImpl( - ApplicationAttemptFinishDataProto proto) { - this.proto = proto; - viaProto = true; - } - - private ApplicationAttemptId applicationAttemptId; - - @Override - public ApplicationAttemptId getApplicationAttemptId() { - if (this.applicationAttemptId != null) { - return this.applicationAttemptId; - } - ApplicationAttemptFinishDataProtoOrBuilder p = viaProto ? proto : builder; - if (!p.hasApplicationAttemptId()) { - return null; - } - this.applicationAttemptId = - convertFromProtoFormat(p.getApplicationAttemptId()); - return this.applicationAttemptId; - } - - @Override - public void - setApplicationAttemptId(ApplicationAttemptId applicationAttemptId) { - maybeInitBuilder(); - if (applicationAttemptId == null) { - builder.clearApplicationAttemptId(); - } - this.applicationAttemptId = applicationAttemptId; - } - - @Override - public String getTrackingURL() { - ApplicationAttemptFinishDataProtoOrBuilder p = viaProto ? proto : builder; - if (!p.hasTrackingUrl()) { - return null; - } - return p.getTrackingUrl(); - } - - @Override - public void setTrackingURL(String trackingURL) { - maybeInitBuilder(); - if (trackingURL == null) { - builder.clearTrackingUrl(); - return; - } - builder.setTrackingUrl(trackingURL); - } - - @Override - public String getDiagnosticsInfo() { - ApplicationAttemptFinishDataProtoOrBuilder p = viaProto ? proto : builder; - if (!p.hasDiagnosticsInfo()) { - return null; - } - return p.getDiagnosticsInfo(); - } - - @Override - public void setDiagnosticsInfo(String diagnosticsInfo) { - maybeInitBuilder(); - if (diagnosticsInfo == null) { - builder.clearDiagnosticsInfo(); - return; - } - builder.setDiagnosticsInfo(diagnosticsInfo); - } - - @Override - public FinalApplicationStatus getFinalApplicationStatus() { - ApplicationAttemptFinishDataProtoOrBuilder p = viaProto ? 
proto : builder; - if (!p.hasFinalApplicationStatus()) { - return null; - } - return convertFromProtoFormat(p.getFinalApplicationStatus()); - } - - @Override - public void setFinalApplicationStatus( - FinalApplicationStatus finalApplicationStatus) { - maybeInitBuilder(); - if (finalApplicationStatus == null) { - builder.clearFinalApplicationStatus(); - return; - } - builder - .setFinalApplicationStatus(convertToProtoFormat(finalApplicationStatus)); - } - - @Override - public YarnApplicationAttemptState getYarnApplicationAttemptState() { - ApplicationAttemptFinishDataProtoOrBuilder p = viaProto ? proto : builder; - if (!p.hasYarnApplicationAttemptState()) { - return null; - } - return convertFromProtoFormat(p.getYarnApplicationAttemptState()); - } - - @Override - public void setYarnApplicationAttemptState(YarnApplicationAttemptState state) { - maybeInitBuilder(); - if (state == null) { - builder.clearYarnApplicationAttemptState(); - return; - } - builder.setYarnApplicationAttemptState(convertToProtoFormat(state)); - } - - public ApplicationAttemptFinishDataProto getProto() { - mergeLocalToProto(); - proto = viaProto ? proto : builder.build(); - viaProto = true; - return proto; - } - - @Override - public int hashCode() { - return getProto().hashCode(); - } - - @Override - public boolean equals(Object other) { - if (other == null) - return false; - if (other.getClass().isAssignableFrom(this.getClass())) { - return this.getProto().equals(this.getClass().cast(other).getProto()); - } - return false; - } - - @Override - public String toString() { - return TextFormat.shortDebugString(getProto()); - } - - private void mergeLocalToBuilder() { - if (this.applicationAttemptId != null - && !((ApplicationAttemptIdPBImpl) this.applicationAttemptId).getProto() - .equals(builder.getApplicationAttemptId())) { - builder - .setApplicationAttemptId(convertToProtoFormat(this.applicationAttemptId)); - } - } - - private void mergeLocalToProto() { - if (viaProto) { - maybeInitBuilder(); - } - mergeLocalToBuilder(); - proto = builder.build(); - viaProto = true; - } - - private void maybeInitBuilder() { - if (viaProto || builder == null) { - builder = ApplicationAttemptFinishDataProto.newBuilder(proto); - } - viaProto = false; - } - - private ApplicationAttemptIdPBImpl convertFromProtoFormat( - ApplicationAttemptIdProto applicationAttemptId) { - return new ApplicationAttemptIdPBImpl(applicationAttemptId); - } - - private ApplicationAttemptIdProto convertToProtoFormat( - ApplicationAttemptId applicationAttemptId) { - return ((ApplicationAttemptIdPBImpl) applicationAttemptId).getProto(); - } - - private FinalApplicationStatus convertFromProtoFormat( - FinalApplicationStatusProto finalApplicationStatus) { - return ProtoUtils.convertFromProtoFormat(finalApplicationStatus); - } - - private FinalApplicationStatusProto convertToProtoFormat( - FinalApplicationStatus finalApplicationStatus) { - return ProtoUtils.convertToProtoFormat(finalApplicationStatus); - } - - private YarnApplicationAttemptStateProto convertToProtoFormat( - YarnApplicationAttemptState state) { - return ProtoUtils.convertToProtoFormat(state); - } - - private YarnApplicationAttemptState convertFromProtoFormat( - YarnApplicationAttemptStateProto yarnApplicationAttemptState) { - return ProtoUtils.convertFromProtoFormat(yarnApplicationAttemptState); - } - -} diff --git 
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/records/impl/pb/ApplicationAttemptStartDataPBImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/records/impl/pb/ApplicationAttemptStartDataPBImpl.java deleted file mode 100644 index 1f67fc7..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/records/impl/pb/ApplicationAttemptStartDataPBImpl.java +++ /dev/null @@ -1,208 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.yarn.server.applicationhistoryservice.records.impl.pb; - -import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; -import org.apache.hadoop.yarn.api.records.ContainerId; -import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationAttemptIdPBImpl; -import org.apache.hadoop.yarn.api.records.impl.pb.ContainerIdPBImpl; -import org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptStartDataProto; -import org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptStartDataProtoOrBuilder; -import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto; -import org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptStartData; - -import com.google.protobuf.TextFormat; - -public class ApplicationAttemptStartDataPBImpl extends - ApplicationAttemptStartData { - - ApplicationAttemptStartDataProto proto = ApplicationAttemptStartDataProto - .getDefaultInstance(); - ApplicationAttemptStartDataProto.Builder builder = null; - boolean viaProto = false; - - public ApplicationAttemptStartDataPBImpl() { - builder = ApplicationAttemptStartDataProto.newBuilder(); - } - - public ApplicationAttemptStartDataPBImpl( - ApplicationAttemptStartDataProto proto) { - this.proto = proto; - viaProto = true; - } - - private ApplicationAttemptId applicationAttemptId; - private ContainerId masterContainerId; - - @Override - public ApplicationAttemptId getApplicationAttemptId() { - if (this.applicationAttemptId != null) { - return this.applicationAttemptId; - } - ApplicationAttemptStartDataProtoOrBuilder p = viaProto ? 
proto : builder; - if (!p.hasApplicationAttemptId()) { - return null; - } - this.applicationAttemptId = - convertFromProtoFormat(p.getApplicationAttemptId()); - return this.applicationAttemptId; - } - - @Override - public void - setApplicationAttemptId(ApplicationAttemptId applicationAttemptId) { - maybeInitBuilder(); - if (applicationAttemptId == null) { - builder.clearApplicationAttemptId(); - } - this.applicationAttemptId = applicationAttemptId; - } - - @Override - public String getHost() { - ApplicationAttemptStartDataProtoOrBuilder p = viaProto ? proto : builder; - if (!p.hasHost()) { - return null; - } - return p.getHost(); - } - - @Override - public void setHost(String host) { - maybeInitBuilder(); - if (host == null) { - builder.clearHost(); - return; - } - builder.setHost(host); - } - - @Override - public int getRPCPort() { - ApplicationAttemptStartDataProtoOrBuilder p = viaProto ? proto : builder; - return p.getRpcPort(); - } - - @Override - public void setRPCPort(int rpcPort) { - maybeInitBuilder(); - builder.setRpcPort(rpcPort); - } - - @Override - public ContainerId getMasterContainerId() { - if (this.masterContainerId != null) { - return this.masterContainerId; - } - ApplicationAttemptStartDataProtoOrBuilder p = viaProto ? proto : builder; - if (!p.hasApplicationAttemptId()) { - return null; - } - this.masterContainerId = convertFromProtoFormat(p.getMasterContainerId()); - return this.masterContainerId; - } - - @Override - public void setMasterContainerId(ContainerId masterContainerId) { - maybeInitBuilder(); - if (masterContainerId == null) { - builder.clearMasterContainerId(); - } - this.masterContainerId = masterContainerId; - } - - public ApplicationAttemptStartDataProto getProto() { - mergeLocalToProto(); - proto = viaProto ? proto : builder.build(); - viaProto = true; - return proto; - } - - @Override - public int hashCode() { - return getProto().hashCode(); - } - - @Override - public boolean equals(Object other) { - if (other == null) - return false; - if (other.getClass().isAssignableFrom(this.getClass())) { - return this.getProto().equals(this.getClass().cast(other).getProto()); - } - return false; - } - - @Override - public String toString() { - return TextFormat.shortDebugString(getProto()); - } - - private void mergeLocalToBuilder() { - if (this.applicationAttemptId != null - && !((ApplicationAttemptIdPBImpl) this.applicationAttemptId).getProto() - .equals(builder.getApplicationAttemptId())) { - builder - .setApplicationAttemptId(convertToProtoFormat(this.applicationAttemptId)); - } - if (this.masterContainerId != null - && !((ContainerIdPBImpl) this.masterContainerId).getProto().equals( - builder.getMasterContainerId())) { - builder - .setMasterContainerId(convertToProtoFormat(this.masterContainerId)); - } - } - - private void mergeLocalToProto() { - if (viaProto) { - maybeInitBuilder(); - } - mergeLocalToBuilder(); - proto = builder.build(); - viaProto = true; - } - - private void maybeInitBuilder() { - if (viaProto || builder == null) { - builder = ApplicationAttemptStartDataProto.newBuilder(proto); - } - viaProto = false; - } - - private ApplicationAttemptIdPBImpl convertFromProtoFormat( - ApplicationAttemptIdProto applicationAttemptId) { - return new ApplicationAttemptIdPBImpl(applicationAttemptId); - } - - private ApplicationAttemptIdProto convertToProtoFormat( - ApplicationAttemptId applicationAttemptId) { - return ((ApplicationAttemptIdPBImpl) applicationAttemptId).getProto(); - } - - private ContainerIdPBImpl - convertFromProtoFormat(ContainerIdProto 
containerId) { - return new ContainerIdPBImpl(containerId); - } - - private ContainerIdProto convertToProtoFormat(ContainerId masterContainerId) { - return ((ContainerIdPBImpl) masterContainerId).getProto(); - } - -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/records/impl/pb/ApplicationFinishDataPBImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/records/impl/pb/ApplicationFinishDataPBImpl.java deleted file mode 100644 index 337426d..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/records/impl/pb/ApplicationFinishDataPBImpl.java +++ /dev/null @@ -1,226 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.yarn.server.applicationhistoryservice.records.impl.pb; - -import org.apache.hadoop.yarn.api.records.ApplicationId; -import org.apache.hadoop.yarn.api.records.FinalApplicationStatus; -import org.apache.hadoop.yarn.api.records.YarnApplicationState; -import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationIdPBImpl; -import org.apache.hadoop.yarn.api.records.impl.pb.ProtoUtils; -import org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationFinishDataProto; -import org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationFinishDataProtoOrBuilder; -import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto; -import org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto; -import org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationFinishData; - -import com.google.protobuf.TextFormat; - -public class ApplicationFinishDataPBImpl extends ApplicationFinishData { - - ApplicationFinishDataProto proto = ApplicationFinishDataProto - .getDefaultInstance(); - ApplicationFinishDataProto.Builder builder = null; - boolean viaProto = false; - - private ApplicationId applicationId; - - public ApplicationFinishDataPBImpl() { - builder = ApplicationFinishDataProto.newBuilder(); - } - - public ApplicationFinishDataPBImpl(ApplicationFinishDataProto proto) { - this.proto = proto; - viaProto = true; - } - - @Override - public ApplicationId getApplicationId() { - if (this.applicationId != null) { - return this.applicationId; - } - ApplicationFinishDataProtoOrBuilder p = viaProto ? 
proto : builder; - if (!p.hasApplicationId()) { - return null; - } - this.applicationId = convertFromProtoFormat(p.getApplicationId()); - return this.applicationId; - } - - @Override - public void setApplicationId(ApplicationId applicationId) { - maybeInitBuilder(); - if (applicationId == null) { - builder.clearApplicationId(); - } - this.applicationId = applicationId; - } - - @Override - public long getFinishTime() { - ApplicationFinishDataProtoOrBuilder p = viaProto ? proto : builder; - return p.getFinishTime(); - } - - @Override - public void setFinishTime(long finishTime) { - maybeInitBuilder(); - builder.setFinishTime(finishTime); - } - - @Override - public String getDiagnosticsInfo() { - ApplicationFinishDataProtoOrBuilder p = viaProto ? proto : builder; - if (!p.hasDiagnosticsInfo()) { - return null; - } - return p.getDiagnosticsInfo(); - } - - @Override - public void setDiagnosticsInfo(String diagnosticsInfo) { - maybeInitBuilder(); - if (diagnosticsInfo == null) { - builder.clearDiagnosticsInfo(); - return; - } - builder.setDiagnosticsInfo(diagnosticsInfo); - } - - @Override - public FinalApplicationStatus getFinalApplicationStatus() { - ApplicationFinishDataProtoOrBuilder p = viaProto ? proto : builder; - if (!p.hasFinalApplicationStatus()) { - return null; - } - return convertFromProtoFormat(p.getFinalApplicationStatus()); - } - - @Override - public void setFinalApplicationStatus( - FinalApplicationStatus finalApplicationStatus) { - maybeInitBuilder(); - if (finalApplicationStatus == null) { - builder.clearFinalApplicationStatus(); - return; - } - builder - .setFinalApplicationStatus(convertToProtoFormat(finalApplicationStatus)); - } - - @Override - public YarnApplicationState getYarnApplicationState() { - ApplicationFinishDataProtoOrBuilder p = viaProto ? proto : builder; - if (!p.hasYarnApplicationState()) { - return null; - } - return convertFromProtoFormat(p.getYarnApplicationState()); - } - - @Override - public void setYarnApplicationState(YarnApplicationState state) { - maybeInitBuilder(); - if (state == null) { - builder.clearYarnApplicationState(); - return; - } - builder.setYarnApplicationState(convertToProtoFormat(state)); - } - - public ApplicationFinishDataProto getProto() { - mergeLocalToProto(); - proto = viaProto ? 
proto : builder.build(); - viaProto = true; - return proto; - } - - @Override - public int hashCode() { - return getProto().hashCode(); - } - - @Override - public boolean equals(Object other) { - if (other == null) - return false; - if (other.getClass().isAssignableFrom(this.getClass())) { - return this.getProto().equals(this.getClass().cast(other).getProto()); - } - return false; - } - - @Override - public String toString() { - return TextFormat.shortDebugString(getProto()); - } - - private void mergeLocalToBuilder() { - if (this.applicationId != null - && !((ApplicationIdPBImpl) this.applicationId).getProto().equals( - builder.getApplicationId())) { - builder.setApplicationId(convertToProtoFormat(this.applicationId)); - } - } - - private void mergeLocalToProto() { - if (viaProto) { - maybeInitBuilder(); - } - mergeLocalToBuilder(); - proto = builder.build(); - viaProto = true; - } - - private void maybeInitBuilder() { - if (viaProto || builder == null) { - builder = ApplicationFinishDataProto.newBuilder(proto); - } - viaProto = false; - } - - private ApplicationIdProto convertToProtoFormat(ApplicationId applicationId) { - return ((ApplicationIdPBImpl) applicationId).getProto(); - } - - private ApplicationIdPBImpl convertFromProtoFormat( - ApplicationIdProto applicationId) { - return new ApplicationIdPBImpl(applicationId); - } - - private FinalApplicationStatus convertFromProtoFormat( - FinalApplicationStatusProto finalApplicationStatus) { - return ProtoUtils.convertFromProtoFormat(finalApplicationStatus); - } - - private FinalApplicationStatusProto convertToProtoFormat( - FinalApplicationStatus finalApplicationStatus) { - return ProtoUtils.convertToProtoFormat(finalApplicationStatus); - } - - private YarnApplicationStateProto convertToProtoFormat( - YarnApplicationState state) { - return ProtoUtils.convertToProtoFormat(state); - } - - private YarnApplicationState convertFromProtoFormat( - YarnApplicationStateProto yarnApplicationState) { - return ProtoUtils.convertFromProtoFormat(yarnApplicationState); - } - -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/records/impl/pb/ApplicationStartDataPBImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/records/impl/pb/ApplicationStartDataPBImpl.java deleted file mode 100644 index 56f7aff..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/records/impl/pb/ApplicationStartDataPBImpl.java +++ /dev/null @@ -1,229 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.yarn.server.applicationhistoryservice.records.impl.pb; - -import org.apache.hadoop.yarn.api.records.ApplicationId; -import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationIdPBImpl; -import org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationStartDataProto; -import org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationStartDataProtoOrBuilder; -import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationStartData; - -import com.google.protobuf.TextFormat; - -public class ApplicationStartDataPBImpl extends ApplicationStartData { - - ApplicationStartDataProto proto = ApplicationStartDataProto - .getDefaultInstance(); - ApplicationStartDataProto.Builder builder = null; - boolean viaProto = false; - - private ApplicationId applicationId; - - public ApplicationStartDataPBImpl() { - builder = ApplicationStartDataProto.newBuilder(); - } - - public ApplicationStartDataPBImpl(ApplicationStartDataProto proto) { - this.proto = proto; - viaProto = true; - } - - @Override - public ApplicationId getApplicationId() { - if (this.applicationId != null) { - return this.applicationId; - } - ApplicationStartDataProtoOrBuilder p = viaProto ? proto : builder; - if (!p.hasApplicationId()) { - return null; - } - this.applicationId = convertFromProtoFormat(p.getApplicationId()); - return this.applicationId; - } - - @Override - public void setApplicationId(ApplicationId applicationId) { - maybeInitBuilder(); - if (applicationId == null) { - builder.clearApplicationId(); - } - this.applicationId = applicationId; - } - - @Override - public String getApplicationName() { - ApplicationStartDataProtoOrBuilder p = viaProto ? proto : builder; - if (!p.hasApplicationName()) { - return null; - } - return p.getApplicationName(); - } - - @Override - public void setApplicationName(String applicationName) { - maybeInitBuilder(); - if (applicationName == null) { - builder.clearApplicationName(); - return; - } - builder.setApplicationName(applicationName); - } - - @Override - public String getApplicationType() { - ApplicationStartDataProtoOrBuilder p = viaProto ? proto : builder; - if (!p.hasApplicationType()) { - return null; - } - return p.getApplicationType(); - } - - @Override - public void setApplicationType(String applicationType) { - maybeInitBuilder(); - if (applicationType == null) { - builder.clearApplicationType(); - return; - } - builder.setApplicationType(applicationType); - } - - @Override - public String getUser() { - ApplicationStartDataProtoOrBuilder p = viaProto ? proto : builder; - if (!p.hasUser()) { - return null; - } - return p.getUser(); - } - - @Override - public void setUser(String user) { - maybeInitBuilder(); - if (user == null) { - builder.clearUser(); - return; - } - builder.setUser(user); - } - - @Override - public String getQueue() { - ApplicationStartDataProtoOrBuilder p = viaProto ? proto : builder; - if (!p.hasQueue()) { - return null; - } - return p.getQueue(); - } - - @Override - public void setQueue(String queue) { - maybeInitBuilder(); - if (queue == null) { - builder.clearQueue(); - return; - } - builder.setQueue(queue); - } - - @Override - public long getSubmitTime() { - ApplicationStartDataProtoOrBuilder p = viaProto ? 
proto : builder; - return p.getSubmitTime(); - } - - @Override - public void setSubmitTime(long submitTime) { - maybeInitBuilder(); - builder.setSubmitTime(submitTime); - } - - @Override - public long getStartTime() { - ApplicationStartDataProtoOrBuilder p = viaProto ? proto : builder; - return p.getStartTime(); - } - - @Override - public void setStartTime(long startTime) { - maybeInitBuilder(); - builder.setStartTime(startTime); - } - - public ApplicationStartDataProto getProto() { - mergeLocalToProto(); - proto = viaProto ? proto : builder.build(); - viaProto = true; - return proto; - } - - @Override - public int hashCode() { - return getProto().hashCode(); - } - - @Override - public boolean equals(Object other) { - if (other == null) - return false; - if (other.getClass().isAssignableFrom(this.getClass())) { - return this.getProto().equals(this.getClass().cast(other).getProto()); - } - return false; - } - - @Override - public String toString() { - return TextFormat.shortDebugString(getProto()); - } - - private void mergeLocalToBuilder() { - if (this.applicationId != null - && !((ApplicationIdPBImpl) this.applicationId).getProto().equals( - builder.getApplicationId())) { - builder.setApplicationId(convertToProtoFormat(this.applicationId)); - } - } - - private void mergeLocalToProto() { - if (viaProto) { - maybeInitBuilder(); - } - mergeLocalToBuilder(); - proto = builder.build(); - viaProto = true; - } - - private void maybeInitBuilder() { - if (viaProto || builder == null) { - builder = ApplicationStartDataProto.newBuilder(proto); - } - viaProto = false; - } - - private ApplicationIdProto convertToProtoFormat(ApplicationId applicationId) { - return ((ApplicationIdPBImpl) applicationId).getProto(); - } - - private ApplicationIdPBImpl convertFromProtoFormat( - ApplicationIdProto applicationId) { - return new ApplicationIdPBImpl(applicationId); - } -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/records/impl/pb/ContainerFinishDataPBImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/records/impl/pb/ContainerFinishDataPBImpl.java deleted file mode 100644 index 8bc01e0..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/records/impl/pb/ContainerFinishDataPBImpl.java +++ /dev/null @@ -1,204 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.hadoop.yarn.server.applicationhistoryservice.records.impl.pb; - -import org.apache.hadoop.yarn.api.records.ContainerId; -import org.apache.hadoop.yarn.api.records.ContainerState; -import org.apache.hadoop.yarn.api.records.impl.pb.ContainerIdPBImpl; -import org.apache.hadoop.yarn.api.records.impl.pb.ProtoUtils; -import org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerFinishDataProto; -import org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerFinishDataProtoOrBuilder; -import org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto; -import org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerFinishData; - -import com.google.protobuf.TextFormat; - -public class ContainerFinishDataPBImpl extends ContainerFinishData { - - ContainerFinishDataProto proto = ContainerFinishDataProto - .getDefaultInstance(); - ContainerFinishDataProto.Builder builder = null; - boolean viaProto = false; - - private ContainerId containerId; - - public ContainerFinishDataPBImpl() { - builder = ContainerFinishDataProto.newBuilder(); - } - - public ContainerFinishDataPBImpl(ContainerFinishDataProto proto) { - this.proto = proto; - viaProto = true; - } - - @Override - public ContainerId getContainerId() { - if (this.containerId != null) { - return this.containerId; - } - ContainerFinishDataProtoOrBuilder p = viaProto ? proto : builder; - if (!p.hasContainerId()) { - return null; - } - this.containerId = convertFromProtoFormat(p.getContainerId()); - return this.containerId; - } - - @Override - public void setContainerId(ContainerId containerId) { - maybeInitBuilder(); - if (containerId == null) { - builder.clearContainerId(); - } - this.containerId = containerId; - } - - @Override - public long getFinishTime() { - ContainerFinishDataProtoOrBuilder p = viaProto ? proto : builder; - return p.getFinishTime(); - } - - @Override - public void setFinishTime(long finishTime) { - maybeInitBuilder(); - builder.setFinishTime(finishTime); - } - - @Override - public String getDiagnosticsInfo() { - ContainerFinishDataProtoOrBuilder p = viaProto ? proto : builder; - if (!p.hasDiagnosticsInfo()) { - return null; - } - return p.getDiagnosticsInfo(); - } - - @Override - public void setDiagnosticsInfo(String diagnosticsInfo) { - maybeInitBuilder(); - if (diagnosticsInfo == null) { - builder.clearDiagnosticsInfo(); - return; - } - builder.setDiagnosticsInfo(diagnosticsInfo); - } - - @Override - public int getContainerExitStatus() { - ContainerFinishDataProtoOrBuilder p = viaProto ? proto : builder; - return p.getContainerExitStatus(); - } - - @Override - public ContainerState getContainerState() { - ContainerFinishDataProtoOrBuilder p = viaProto ? proto : builder; - if (!p.hasContainerState()) { - return null; - } - return convertFromProtoFormat(p.getContainerState()); - } - - @Override - public void setContainerState(ContainerState state) { - maybeInitBuilder(); - if (state == null) { - builder.clearContainerState(); - return; - } - builder.setContainerState(convertToProtoFormat(state)); - } - - @Override - public void setContainerExitStatus(int containerExitStatus) { - maybeInitBuilder(); - builder.setContainerExitStatus(containerExitStatus); - } - - public ContainerFinishDataProto getProto() { - mergeLocalToProto(); - proto = viaProto ? 
proto : builder.build(); - viaProto = true; - return proto; - } - - @Override - public int hashCode() { - return getProto().hashCode(); - } - - @Override - public boolean equals(Object other) { - if (other == null) - return false; - if (other.getClass().isAssignableFrom(this.getClass())) { - return this.getProto().equals(this.getClass().cast(other).getProto()); - } - return false; - } - - @Override - public String toString() { - return TextFormat.shortDebugString(getProto()); - } - - private void mergeLocalToBuilder() { - if (this.containerId != null - && !((ContainerIdPBImpl) this.containerId).getProto().equals( - builder.getContainerId())) { - builder.setContainerId(convertToProtoFormat(this.containerId)); - } - } - - private void mergeLocalToProto() { - if (viaProto) { - maybeInitBuilder(); - } - mergeLocalToBuilder(); - proto = builder.build(); - viaProto = true; - } - - private void maybeInitBuilder() { - if (viaProto || builder == null) { - builder = ContainerFinishDataProto.newBuilder(proto); - } - viaProto = false; - } - - private ContainerIdProto convertToProtoFormat(ContainerId containerId) { - return ((ContainerIdPBImpl) containerId).getProto(); - } - - private ContainerIdPBImpl - convertFromProtoFormat(ContainerIdProto containerId) { - return new ContainerIdPBImpl(containerId); - } - - private ContainerStateProto convertToProtoFormat(ContainerState state) { - return ProtoUtils.convertToProtoFormat(state); - } - - private ContainerState convertFromProtoFormat( - ContainerStateProto containerState) { - return ProtoUtils.convertFromProtoFormat(containerState); - } - -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/records/impl/pb/ContainerStartDataPBImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/records/impl/pb/ContainerStartDataPBImpl.java deleted file mode 100644 index 6d248b2..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/records/impl/pb/ContainerStartDataPBImpl.java +++ /dev/null @@ -1,258 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.hadoop.yarn.server.applicationhistoryservice.records.impl.pb; - -import org.apache.hadoop.yarn.api.records.ContainerId; -import org.apache.hadoop.yarn.api.records.NodeId; -import org.apache.hadoop.yarn.api.records.Priority; -import org.apache.hadoop.yarn.api.records.Resource; -import org.apache.hadoop.yarn.api.records.impl.pb.ContainerIdPBImpl; -import org.apache.hadoop.yarn.api.records.impl.pb.NodeIdPBImpl; -import org.apache.hadoop.yarn.api.records.impl.pb.PriorityPBImpl; -import org.apache.hadoop.yarn.api.records.impl.pb.ResourcePBImpl; -import org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerStartDataProto; -import org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerStartDataProtoOrBuilder; -import org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto; -import org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto; -import org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto; -import org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerStartData; - -import com.google.protobuf.TextFormat; - -public class ContainerStartDataPBImpl extends ContainerStartData { - - ContainerStartDataProto proto = ContainerStartDataProto.getDefaultInstance(); - ContainerStartDataProto.Builder builder = null; - boolean viaProto = false; - - private ContainerId containerId; - private Resource resource; - private NodeId nodeId; - private Priority priority; - - public ContainerStartDataPBImpl() { - builder = ContainerStartDataProto.newBuilder(); - } - - public ContainerStartDataPBImpl(ContainerStartDataProto proto) { - this.proto = proto; - viaProto = true; - } - - @Override - public ContainerId getContainerId() { - if (this.containerId != null) { - return this.containerId; - } - ContainerStartDataProtoOrBuilder p = viaProto ? proto : builder; - if (!p.hasContainerId()) { - return null; - } - this.containerId = convertFromProtoFormat(p.getContainerId()); - return this.containerId; - } - - @Override - public void setContainerId(ContainerId containerId) { - maybeInitBuilder(); - if (containerId == null) { - builder.clearContainerId(); - } - this.containerId = containerId; - } - - @Override - public Resource getAllocatedResource() { - if (this.resource != null) { - return this.resource; - } - ContainerStartDataProtoOrBuilder p = viaProto ? proto : builder; - if (!p.hasAllocatedResource()) { - return null; - } - this.resource = convertFromProtoFormat(p.getAllocatedResource()); - return this.resource; - } - - @Override - public void setAllocatedResource(Resource resource) { - maybeInitBuilder(); - if (resource == null) { - builder.clearAllocatedResource(); - } - this.resource = resource; - } - - @Override - public NodeId getAssignedNode() { - if (this.nodeId != null) { - return this.nodeId; - } - ContainerStartDataProtoOrBuilder p = viaProto ? proto : builder; - if (!p.hasAssignedNodeId()) { - return null; - } - this.nodeId = convertFromProtoFormat(p.getAssignedNodeId()); - return this.nodeId; - } - - @Override - public void setAssignedNode(NodeId nodeId) { - maybeInitBuilder(); - if (nodeId == null) { - builder.clearAssignedNodeId(); - } - this.nodeId = nodeId; - } - - @Override - public Priority getPriority() { - if (this.priority != null) { - return this.priority; - } - ContainerStartDataProtoOrBuilder p = viaProto ? 
proto : builder; - if (!p.hasPriority()) { - return null; - } - this.priority = convertFromProtoFormat(p.getPriority()); - return this.priority; - } - - @Override - public void setPriority(Priority priority) { - maybeInitBuilder(); - if (priority == null) { - builder.clearPriority(); - } - this.priority = priority; - } - - @Override - public long getStartTime() { - ContainerStartDataProtoOrBuilder p = viaProto ? proto : builder; - return p.getStartTime(); - } - - @Override - public void setStartTime(long startTime) { - maybeInitBuilder(); - builder.setStartTime(startTime); - } - - public ContainerStartDataProto getProto() { - mergeLocalToProto(); - proto = viaProto ? proto : builder.build(); - viaProto = true; - return proto; - } - - @Override - public int hashCode() { - return getProto().hashCode(); - } - - @Override - public boolean equals(Object other) { - if (other == null) - return false; - if (other.getClass().isAssignableFrom(this.getClass())) { - return this.getProto().equals(this.getClass().cast(other).getProto()); - } - return false; - } - - @Override - public String toString() { - return TextFormat.shortDebugString(getProto()); - } - - private void mergeLocalToBuilder() { - if (this.containerId != null - && !((ContainerIdPBImpl) this.containerId).getProto().equals( - builder.getContainerId())) { - builder.setContainerId(convertToProtoFormat(this.containerId)); - } - if (this.resource != null - && !((ResourcePBImpl) this.resource).getProto().equals( - builder.getAllocatedResource())) { - builder.setAllocatedResource(convertToProtoFormat(this.resource)); - } - if (this.nodeId != null - && !((NodeIdPBImpl) this.nodeId).getProto().equals( - builder.getAssignedNodeId())) { - builder.setAssignedNodeId(convertToProtoFormat(this.nodeId)); - } - if (this.priority != null - && !((PriorityPBImpl) this.priority).getProto().equals( - builder.getPriority())) { - builder.setPriority(convertToProtoFormat(this.priority)); - } - } - - private void mergeLocalToProto() { - if (viaProto) { - maybeInitBuilder(); - } - mergeLocalToBuilder(); - proto = builder.build(); - viaProto = true; - } - - private void maybeInitBuilder() { - if (viaProto || builder == null) { - builder = ContainerStartDataProto.newBuilder(proto); - } - viaProto = false; - } - - private ContainerIdProto convertToProtoFormat(ContainerId containerId) { - return ((ContainerIdPBImpl) containerId).getProto(); - } - - private ContainerIdPBImpl - convertFromProtoFormat(ContainerIdProto containerId) { - return new ContainerIdPBImpl(containerId); - } - - private ResourceProto convertToProtoFormat(Resource resource) { - return ((ResourcePBImpl) resource).getProto(); - } - - private ResourcePBImpl convertFromProtoFormat(ResourceProto resource) { - return new ResourcePBImpl(resource); - } - - private NodeIdProto convertToProtoFormat(NodeId nodeId) { - return ((NodeIdPBImpl) nodeId).getProto(); - } - - private NodeIdPBImpl convertFromProtoFormat(NodeIdProto nodeId) { - return new NodeIdPBImpl(nodeId); - } - - private PriorityProto convertToProtoFormat(Priority priority) { - return ((PriorityPBImpl) priority).getProto(); - } - - private PriorityPBImpl convertFromProtoFormat(PriorityProto priority) { - return new PriorityPBImpl(priority); - } - -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryStoreTestUtils.java 
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryStoreTestUtils.java deleted file mode 100644 index c41b8a7..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryStoreTestUtils.java +++ /dev/null @@ -1,84 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.yarn.server.applicationhistoryservice; - -import java.io.IOException; - -import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; -import org.apache.hadoop.yarn.api.records.ApplicationId; -import org.apache.hadoop.yarn.api.records.ContainerId; -import org.apache.hadoop.yarn.api.records.ContainerState; -import org.apache.hadoop.yarn.api.records.FinalApplicationStatus; -import org.apache.hadoop.yarn.api.records.NodeId; -import org.apache.hadoop.yarn.api.records.Priority; -import org.apache.hadoop.yarn.api.records.Resource; -import org.apache.hadoop.yarn.api.records.YarnApplicationAttemptState; -import org.apache.hadoop.yarn.api.records.YarnApplicationState; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptFinishData; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptStartData; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationFinishData; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationStartData; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerFinishData; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerStartData; - -public class ApplicationHistoryStoreTestUtils { - - protected ApplicationHistoryStore store; - - protected void writeApplicationStartData(ApplicationId appId) - throws IOException { - store.applicationStarted(ApplicationStartData.newInstance(appId, - appId.toString(), "test type", "test queue", "test user", 0, 0)); - } - - protected void writeApplicationFinishData(ApplicationId appId) - throws IOException { - store.applicationFinished(ApplicationFinishData.newInstance(appId, 0, - appId.toString(), FinalApplicationStatus.UNDEFINED, - YarnApplicationState.FINISHED)); - } - - protected void writeApplicationAttemptStartData( - ApplicationAttemptId appAttemptId) throws IOException { - store.applicationAttemptStarted(ApplicationAttemptStartData.newInstance( - appAttemptId, appAttemptId.toString(), 0, - ContainerId.newInstance(appAttemptId, 1))); - } - - protected void writeApplicationAttemptFinishData( - ApplicationAttemptId appAttemptId) throws IOException 
{ - store.applicationAttemptFinished(ApplicationAttemptFinishData.newInstance( - appAttemptId, appAttemptId.toString(), "test tracking url", - FinalApplicationStatus.UNDEFINED, YarnApplicationAttemptState.FINISHED)); - } - - protected void writeContainerStartData(ContainerId containerId) - throws IOException { - store.containerStarted(ContainerStartData.newInstance(containerId, - Resource.newInstance(0, 0), NodeId.newInstance("localhost", 0), - Priority.newInstance(containerId.getId()), 0)); - } - - protected void writeContainerFinishData(ContainerId containerId) - throws IOException { - store.containerFinished(ContainerFinishData.newInstance(containerId, 0, - containerId.toString(), 0, ContainerState.COMPLETE)); - } - -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryClientService.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryClientService.java index ad2907b..209a16e 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryClientService.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryClientService.java @@ -44,58 +44,45 @@ import org.apache.hadoop.yarn.api.records.ContainerReport; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.exceptions.YarnException; -import org.apache.hadoop.yarn.server.applicationhistoryservice.webapp.AHSWebApp; -import org.apache.hadoop.yarn.webapp.util.WebAppUtils; -import org.junit.After; -import org.junit.Before; +import org.apache.hadoop.yarn.server.security.ApplicationACLsManager; +import org.apache.hadoop.yarn.server.timeline.TimelineDataManager; +import org.apache.hadoop.yarn.server.timeline.TimelineStore; +import org.apache.hadoop.yarn.server.timeline.security.TimelineACLsManager; +import org.junit.BeforeClass; import org.junit.Test; -public class TestApplicationHistoryClientService extends - ApplicationHistoryStoreTestUtils { +public class TestApplicationHistoryClientService { - ApplicationHistoryServer historyServer = null; - String expectedLogUrl = null; + private static ApplicationHistoryClientService clientService; - @Before - public void setup() { - historyServer = new ApplicationHistoryServer(); - Configuration config = new YarnConfiguration(); - expectedLogUrl = WebAppUtils.getHttpSchemePrefix(config) + - WebAppUtils.getAHSWebAppURLWithoutScheme(config) + - "/applicationhistory/logs/localhost:0/container_0_0001_01_000001/" + - "container_0_0001_01_000001/test user"; - config.setBoolean(YarnConfiguration.APPLICATION_HISTORY_ENABLED, true); - config.setClass(YarnConfiguration.APPLICATION_HISTORY_STORE, - MemoryApplicationHistoryStore.class, ApplicationHistoryStore.class); - historyServer.init(config); - historyServer.start(); - store = - ((ApplicationHistoryManagerImpl) historyServer.getApplicationHistoryManager()) - .getHistoryStore(); - } - - @After - public void tearDown() throws Exception { - AHSWebApp.resetInstance(); - historyServer.stop(); + @BeforeClass + public static void setup() throws Exception { + Configuration conf = new 
YarnConfiguration(); + TimelineStore store = TestApplicationHistoryManagerImpl.createStore(2); + TimelineACLsManager aclsManager = new TimelineACLsManager(conf); + TimelineDataManager dataManager = + new TimelineDataManager(store, aclsManager); + ApplicationACLsManager appAclsManager = new ApplicationACLsManager(conf); + ApplicationHistoryManagerImpl historyManager = + new ApplicationHistoryManagerImpl(dataManager, appAclsManager); + historyManager.init(conf); + historyManager.start(); + clientService = new ApplicationHistoryClientService(historyManager); } @Test public void testApplicationReport() throws IOException, YarnException { ApplicationId appId = null; appId = ApplicationId.newInstance(0, 1); - writeApplicationStartData(appId); - writeApplicationFinishData(appId); GetApplicationReportRequest request = GetApplicationReportRequest.newInstance(appId); GetApplicationReportResponse response = - historyServer.getClientService().getClientHandler() - .getApplicationReport(request); + clientService.getClientHandler().getApplicationReport(request); ApplicationReport appReport = response.getApplicationReport(); Assert.assertNotNull(appReport); Assert.assertEquals("application_0_0001", appReport.getApplicationId() .toString()); - Assert.assertEquals("test type", appReport.getApplicationType().toString()); + Assert.assertEquals("test app type", appReport.getApplicationType().toString()); Assert.assertEquals("test queue", appReport.getQueue().toString()); } @@ -103,15 +90,10 @@ public void testApplicationReport() throws IOException, YarnException { public void testApplications() throws IOException, YarnException { ApplicationId appId = null; appId = ApplicationId.newInstance(0, 1); - writeApplicationStartData(appId); - writeApplicationFinishData(appId); ApplicationId appId1 = ApplicationId.newInstance(0, 2); - writeApplicationStartData(appId1); - writeApplicationFinishData(appId1); GetApplicationsRequest request = GetApplicationsRequest.newInstance(); GetApplicationsResponse response = - historyServer.getClientService().getClientHandler() - .getApplications(request); + clientService.getClientHandler().getApplications(request); List appReport = response.getApplicationList(); Assert.assertNotNull(appReport); Assert.assertEquals(appId, appReport.get(0).getApplicationId()); @@ -123,13 +105,10 @@ public void testApplicationAttemptReport() throws IOException, YarnException { ApplicationId appId = ApplicationId.newInstance(0, 1); ApplicationAttemptId appAttemptId = ApplicationAttemptId.newInstance(appId, 1); - writeApplicationAttemptStartData(appAttemptId); - writeApplicationAttemptFinishData(appAttemptId); GetApplicationAttemptReportRequest request = GetApplicationAttemptReportRequest.newInstance(appAttemptId); GetApplicationAttemptReportResponse response = - historyServer.getClientService().getClientHandler() - .getApplicationAttemptReport(request); + clientService.getClientHandler().getApplicationAttemptReport(request); ApplicationAttemptReport attemptReport = response.getApplicationAttemptReport(); Assert.assertNotNull(attemptReport); @@ -144,15 +123,10 @@ public void testApplicationAttempts() throws IOException, YarnException { ApplicationAttemptId.newInstance(appId, 1); ApplicationAttemptId appAttemptId1 = ApplicationAttemptId.newInstance(appId, 2); - writeApplicationAttemptStartData(appAttemptId); - writeApplicationAttemptFinishData(appAttemptId); - writeApplicationAttemptStartData(appAttemptId1); - writeApplicationAttemptFinishData(appAttemptId1); GetApplicationAttemptsRequest request = 
GetApplicationAttemptsRequest.newInstance(appId); GetApplicationAttemptsResponse response = - historyServer.getClientService().getClientHandler() - .getApplicationAttempts(request); + clientService.getClientHandler().getApplicationAttempts(request); List attemptReports = response.getApplicationAttemptList(); Assert.assertNotNull(attemptReports); @@ -165,42 +139,32 @@ public void testApplicationAttempts() throws IOException, YarnException { @Test public void testContainerReport() throws IOException, YarnException { ApplicationId appId = ApplicationId.newInstance(0, 1); - writeApplicationStartData(appId); ApplicationAttemptId appAttemptId = ApplicationAttemptId.newInstance(appId, 1); ContainerId containerId = ContainerId.newInstance(appAttemptId, 1); - writeContainerStartData(containerId); - writeContainerFinishData(containerId); - writeApplicationFinishData(appId); GetContainerReportRequest request = GetContainerReportRequest.newInstance(containerId); GetContainerReportResponse response = - historyServer.getClientService().getClientHandler() - .getContainerReport(request); + clientService.getClientHandler().getContainerReport(request); ContainerReport container = response.getContainerReport(); Assert.assertNotNull(container); Assert.assertEquals(containerId, container.getContainerId()); - Assert.assertEquals(expectedLogUrl, container.getLogUrl()); + Assert.assertEquals("http://0.0.0.0:8188/applicationhistory/logs/" + + "test host:-100/container_0_0001_01_000001/" + + "container_0_0001_01_000001/user1", container.getLogUrl()); } @Test public void testContainers() throws IOException, YarnException { ApplicationId appId = ApplicationId.newInstance(0, 1); - writeApplicationStartData(appId); ApplicationAttemptId appAttemptId = ApplicationAttemptId.newInstance(appId, 1); ContainerId containerId = ContainerId.newInstance(appAttemptId, 1); ContainerId containerId1 = ContainerId.newInstance(appAttemptId, 2); - writeContainerStartData(containerId); - writeContainerFinishData(containerId); - writeContainerStartData(containerId1); - writeContainerFinishData(containerId1); - writeApplicationFinishData(appId); GetContainersRequest request = GetContainersRequest.newInstance(appAttemptId); GetContainersResponse response = - historyServer.getClientService().getClientHandler() - .getContainers(request); + clientService.getClientHandler().getContainers(request); List containers = response.getContainerList(); Assert.assertNotNull(containers); Assert.assertEquals(containerId, containers.get(1).getContainerId()); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryManagerImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryManagerImpl.java index fec2bf3..4512fc5 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryManagerImpl.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryManagerImpl.java @@ -18,57 +18,522 @@ package org.apache.hadoop.yarn.server.applicationhistoryservice; -import java.io.IOException; +import 
java.lang.reflect.UndeclaredThrowableException; +import java.security.PrivilegedExceptionAction; +import java.util.Arrays; +import java.util.Collection; +import java.util.HashMap; +import java.util.Map; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.security.SaslRpcServer.AuthMethod; +import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; +import org.apache.hadoop.yarn.api.records.ApplicationAttemptReport; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ApplicationReport; +import org.apache.hadoop.yarn.api.records.ContainerId; +import org.apache.hadoop.yarn.api.records.ContainerReport; +import org.apache.hadoop.yarn.api.records.ContainerState; +import org.apache.hadoop.yarn.api.records.FinalApplicationStatus; +import org.apache.hadoop.yarn.api.records.NodeId; +import org.apache.hadoop.yarn.api.records.Priority; +import org.apache.hadoop.yarn.api.records.Resource; +import org.apache.hadoop.yarn.api.records.YarnApplicationAttemptState; +import org.apache.hadoop.yarn.api.records.YarnApplicationState; +import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities; +import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity; +import org.apache.hadoop.yarn.api.records.timeline.TimelineEvent; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.exceptions.YarnException; +import org.apache.hadoop.yarn.server.metrics.AppAttemptMetricsConstants; +import org.apache.hadoop.yarn.server.metrics.ApplicationMetricsConstants; +import org.apache.hadoop.yarn.server.metrics.ContainerMetricsConstants; +import org.apache.hadoop.yarn.server.security.ApplicationACLsManager; +import org.apache.hadoop.yarn.server.timeline.MemoryTimelineStore; +import org.apache.hadoop.yarn.server.timeline.TimelineDataManager; +import org.apache.hadoop.yarn.server.timeline.TimelineStore; +import org.apache.hadoop.yarn.server.timeline.security.TimelineACLsManager; import org.junit.After; import org.junit.Assert; import org.junit.Before; +import org.junit.BeforeClass; import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; +import org.junit.runners.Parameterized.Parameters; -public class TestApplicationHistoryManagerImpl extends - ApplicationHistoryStoreTestUtils { - ApplicationHistoryManagerImpl applicationHistoryManagerImpl = null; +@RunWith(Parameterized.class) +public class TestApplicationHistoryManagerImpl { + + private static final int SCALE = 5; + private static TimelineStore store; + + private ApplicationHistoryManagerImpl historyManager; + private UserGroupInformation callerUGI; + private Configuration conf; + + @BeforeClass + public static void prepareStore() throws Exception { + store = createStore(SCALE); + } + + public static TimelineStore createStore(int scale) throws Exception { + TimelineStore store = new MemoryTimelineStore(); + prepareTimelineStore(store, scale); + return store; + } @Before public void setup() throws Exception { - Configuration config = new Configuration(); - config.setClass(YarnConfiguration.APPLICATION_HISTORY_STORE, - MemoryApplicationHistoryStore.class, ApplicationHistoryStore.class); - applicationHistoryManagerImpl = new ApplicationHistoryManagerImpl(); - applicationHistoryManagerImpl.init(config); - applicationHistoryManagerImpl.start(); - store = applicationHistoryManagerImpl.getHistoryStore(); + // Only test the ACLs of the generic history + TimelineACLsManager 
aclsManager = new TimelineACLsManager(new YarnConfiguration()); + TimelineDataManager dataManager = + new TimelineDataManager(store, aclsManager); + ApplicationACLsManager appAclsManager = new ApplicationACLsManager(conf); + historyManager = + new ApplicationHistoryManagerImpl(dataManager, appAclsManager); + historyManager.init(conf); + historyManager.start(); } @After - public void tearDown() throws Exception { - applicationHistoryManagerImpl.stop(); + public void tearDown() { + if (historyManager != null) { + historyManager.stop(); + } + } + + @Parameters + public static Collection callers() { + // user1 is the owner + // user2 is the authorized user + // user3 is the unauthorized user + // admin is the admin acl + return Arrays.asList( + new Object[][] { { "" }, { "user1" }, { "user2" }, { "user3" }, { "admin" } }); + } + + public TestApplicationHistoryManagerImpl(String caller) { + conf = new YarnConfiguration(); + if (!caller.equals("")) { + callerUGI = UserGroupInformation.createRemoteUser(caller, AuthMethod.SIMPLE); + conf.setBoolean(YarnConfiguration.YARN_ACL_ENABLE, true); + conf.set(YarnConfiguration.YARN_ADMIN_ACL, "admin"); + } + } + + private static void prepareTimelineStore(TimelineStore store, int scale) + throws Exception { + for (int i = 1; i <= scale; ++i) { + TimelineEntities entities = new TimelineEntities(); + ApplicationId appId = ApplicationId.newInstance(0, i); + if (i == 2) { + entities.addEntity(createApplicationTimelineEntity(appId, true)); + } else { + entities.addEntity(createApplicationTimelineEntity(appId, false)); + } + store.put(entities); + for (int j = 1; j <= scale; ++j) { + entities = new TimelineEntities(); + ApplicationAttemptId appAttemptId = + ApplicationAttemptId.newInstance(appId, j); + entities.addEntity(createAppAttemptTimelineEntity(appAttemptId)); + store.put(entities); + for (int k = 1; k <= scale; ++k) { + entities = new TimelineEntities(); + ContainerId containerId = ContainerId.newInstance(appAttemptId, k); + entities.addEntity(createContainerEntity(containerId)); + store.put(entities); + } + } + } + } + + @Test + public void testGetApplicationReport() throws Exception { + for (int i = 1; i <= 2; ++i) { + final ApplicationId appId = ApplicationId.newInstance(0, i); + ApplicationReport app; + if (callerUGI == null) { + app = historyManager.getApplication(appId); + } else { + app = + callerUGI.doAs(new PrivilegedExceptionAction () { + @Override + public ApplicationReport run() throws Exception { + return historyManager.getApplication(appId); + } + }); + } + Assert.assertNotNull(app); + Assert.assertEquals(appId, app.getApplicationId()); + Assert.assertEquals("test app", app.getName()); + Assert.assertEquals("test app type", app.getApplicationType()); + Assert.assertEquals("user1", app.getUser()); + Assert.assertEquals("test queue", app.getQueue()); + Assert.assertEquals(Integer.MAX_VALUE + 2L, app.getStartTime()); + Assert.assertEquals(Integer.MAX_VALUE + 3L, app.getFinishTime()); + Assert.assertTrue(Math.abs(app.getProgress() - 1.0F) < 0.0001); + // App 2 doesn't have the ACLs, such that the default ACLs " " will be used. + // Nobody except admin and owner has access to the details of the app. 
+ if ((i == 1 && callerUGI != null && + callerUGI.getShortUserName().equals("user3")) || + (i == 2 && callerUGI != null && + (callerUGI.getShortUserName().equals("user2") || + callerUGI.getShortUserName().equals("user3")))) { + Assert.assertEquals(ApplicationAttemptId.newInstance(appId, -1), + app.getCurrentApplicationAttemptId()); + Assert.assertEquals(null, app.getHost()); + Assert.assertEquals(-1, app.getRpcPort()); + Assert.assertEquals(null, app.getTrackingUrl()); + Assert.assertEquals(null, app.getOriginalTrackingUrl()); + Assert.assertEquals(null, app.getDiagnostics()); + } else { + Assert.assertEquals(ApplicationAttemptId.newInstance(appId, 1), + app.getCurrentApplicationAttemptId()); + Assert.assertEquals("test host", app.getHost()); + Assert.assertEquals(-100, app.getRpcPort()); + Assert.assertEquals("test tracking url", app.getTrackingUrl()); + Assert.assertEquals("test original tracking url", + app.getOriginalTrackingUrl()); + Assert.assertEquals("test diagnostics info", app.getDiagnostics()); + } + Assert.assertEquals(FinalApplicationStatus.UNDEFINED, + app.getFinalApplicationStatus()); + Assert.assertEquals(YarnApplicationState.FINISHED, + app.getYarnApplicationState()); + } + } + + @Test + public void testGetApplicationAttemptReport() throws Exception { + final ApplicationAttemptId appAttemptId = + ApplicationAttemptId.newInstance(ApplicationId.newInstance(0, 1), 1); + ApplicationAttemptReport appAttempt; + if (callerUGI == null) { + appAttempt = historyManager.getApplicationAttempt(appAttemptId); + } else { + try { + appAttempt = + callerUGI.doAs(new PrivilegedExceptionAction () { + @Override + public ApplicationAttemptReport run() throws Exception { + return historyManager.getApplicationAttempt(appAttemptId); + } + }); + if (callerUGI != null && callerUGI.getShortUserName().equals("user3")) { + // The exception is expected + Assert.fail(); + } + } catch (UndeclaredThrowableException e) { + if (callerUGI != null && callerUGI.getShortUserName().equals("user3")) { + if (e.getCause().getMessage().contains( + "does not have privilage to see this application")) { + // The exception is expected + return; + } + } + throw e; + } + } + Assert.assertNotNull(appAttempt); + Assert.assertEquals(appAttemptId, appAttempt.getApplicationAttemptId()); + Assert.assertEquals(ContainerId.newInstance(appAttemptId, 1), + appAttempt.getAMContainerId()); + Assert.assertEquals("test host", appAttempt.getHost()); + Assert.assertEquals(-100, appAttempt.getRpcPort()); + Assert.assertEquals("test tracking url", appAttempt.getTrackingUrl()); + Assert.assertEquals("test original tracking url", + appAttempt.getOriginalTrackingUrl()); + Assert.assertEquals("test diagnostics info", appAttempt.getDiagnostics()); + Assert.assertEquals(YarnApplicationAttemptState.FINISHED, + appAttempt.getYarnApplicationAttemptState()); } @Test - public void testApplicationReport() throws IOException, YarnException { - ApplicationId appId = null; - appId = ApplicationId.newInstance(0, 1); - writeApplicationStartData(appId); - writeApplicationFinishData(appId); - ApplicationAttemptId appAttemptId = - ApplicationAttemptId.newInstance(appId, 1); - writeApplicationAttemptStartData(appAttemptId); - writeApplicationAttemptFinishData(appAttemptId); - ApplicationReport appReport = - applicationHistoryManagerImpl.getApplication(appId); - Assert.assertNotNull(appReport); - Assert.assertEquals(appId, appReport.getApplicationId()); - Assert.assertEquals(appAttemptId, - appReport.getCurrentApplicationAttemptId()); - 
Assert.assertEquals(appAttemptId.toString(), appReport.getHost()); - Assert.assertEquals("test type", appReport.getApplicationType().toString()); - Assert.assertEquals("test queue", appReport.getQueue().toString()); + public void testGetContainerReport() throws Exception { + final ContainerId containerId = + ContainerId.newInstance(ApplicationAttemptId.newInstance( + ApplicationId.newInstance(0, 1), 1), 1); + ContainerReport container; + if (callerUGI == null) { + container = historyManager.getContainer(containerId); + } else { + try { + container = + callerUGI.doAs(new PrivilegedExceptionAction () { + @Override + public ContainerReport run() throws Exception { + return historyManager.getContainer(containerId); + } + }); + if (callerUGI != null && callerUGI.getShortUserName().equals("user3")) { + // The exception is expected + Assert.fail(); + } + } catch (UndeclaredThrowableException e) { + if (callerUGI != null && callerUGI.getShortUserName().equals("user3")) { + if (e.getCause().getMessage().contains( + "does not have privilage to see this application")) { + // The exception is expected + return; + } + } + throw e; + } + } + Assert.assertNotNull(container); + Assert.assertEquals(Integer.MAX_VALUE + 1L, container.getCreationTime()); + Assert.assertEquals(Integer.MAX_VALUE + 2L, container.getFinishTime()); + Assert.assertEquals(Resource.newInstance(-1, -1), + container.getAllocatedResource()); + Assert.assertEquals(NodeId.newInstance("test host", -100), + container.getAssignedNode()); + Assert.assertEquals(Priority.UNDEFINED, container.getPriority()); + Assert + .assertEquals("test diagnostics info", container.getDiagnosticsInfo()); + Assert.assertEquals(ContainerState.COMPLETE, container.getContainerState()); + Assert.assertEquals(-1, container.getContainerExitStatus()); + Assert.assertEquals("http://0.0.0.0:8188/applicationhistory/logs/" + + "test host:-100/container_0_0001_01_000001/" + + "container_0_0001_01_000001/user1", container.getLogUrl()); + } + + @Test + public void testGetApplications() throws Exception { + Collection apps = + historyManager.getAllApplications().values(); + Assert.assertNotNull(apps); + Assert.assertEquals(SCALE, apps.size()); + } + + @Test + public void testGetApplicationAttempts() throws Exception { + final ApplicationId appId = ApplicationId.newInstance(0, 1); + Collection appAttempts; + if (callerUGI == null) { + appAttempts = historyManager.getApplicationAttempts(appId).values(); + } else { + try { + appAttempts = callerUGI.doAs( + new PrivilegedExceptionAction> () { + @Override + public Collection run() throws Exception { + return historyManager.getApplicationAttempts(appId).values(); + } + }); + if (callerUGI != null && callerUGI.getShortUserName().equals("user3")) { + // The exception is expected + Assert.fail(); + } + } catch (UndeclaredThrowableException e) { + if (callerUGI != null && callerUGI.getShortUserName().equals("user3")) { + if (e.getCause().getMessage().contains( + "does not have privilage to see this application")) { + // The exception is expected + return; + } + } + throw e; + } + } + Assert.assertNotNull(appAttempts); + Assert.assertEquals(SCALE, appAttempts.size()); + } + + @Test + public void testGetContainers() throws Exception { + final ApplicationAttemptId appAttemptId = + ApplicationAttemptId.newInstance(ApplicationId.newInstance(0, 1), 1); + Collection containers; + if (callerUGI == null) { + containers = historyManager.getContainers(appAttemptId).values(); + } else { + try { + containers = callerUGI.doAs( + new 
PrivilegedExceptionAction> () { + @Override + public Collection run() throws Exception { + return historyManager.getContainers(appAttemptId).values(); + } + }); + if (callerUGI != null && callerUGI.getShortUserName().equals("user3")) { + // The exception is expected + Assert.fail(); + } + } catch (UndeclaredThrowableException e) { + if (callerUGI != null && callerUGI.getShortUserName().equals("user3")) { + if (e.getCause().getMessage().contains( + "does not have privilage to see this application")) { + // The exception is expected + return; + } + } + throw e; + } + } + Assert.assertNotNull(containers); + Assert.assertEquals(SCALE, containers.size()); + } + + @Test + public void testGetAMContainer() throws Exception { + final ApplicationAttemptId appAttemptId = + ApplicationAttemptId.newInstance(ApplicationId.newInstance(0, 1), 1); + ContainerReport container; + if (callerUGI == null) { + container = historyManager.getAMContainer(appAttemptId); + } else { + try { + container = + callerUGI.doAs(new PrivilegedExceptionAction () { + @Override + public ContainerReport run() throws Exception { + return historyManager.getAMContainer(appAttemptId); + } + }); + if (callerUGI != null && callerUGI.getShortUserName().equals("user3")) { + // The exception is expected + Assert.fail(); + } + } catch (UndeclaredThrowableException e) { + if (callerUGI != null && callerUGI.getShortUserName().equals("user3")) { + if (e.getCause().getMessage().contains( + "does not have privilage to see this application")) { + // The exception is expected + return; + } + } + throw e; + } + } + Assert.assertNotNull(container); + Assert.assertEquals(appAttemptId, container.getContainerId() + .getApplicationAttemptId()); + } + + private static TimelineEntity createApplicationTimelineEntity( + ApplicationId appId, boolean emptyACLs) { + TimelineEntity entity = new TimelineEntity(); + entity.setEntityType(ApplicationMetricsConstants.ENTITY_TYPE); + entity.setEntityId(appId.toString()); + entity.setDomainId(TimelineDataManager.DEFAULT_DOMAIN_ID); + entity.addPrimaryFilter( + TimelineStore.SystemFilter.ENTITY_OWNER.toString(), "yarn"); + Map entityInfo = new HashMap(); + entityInfo.put(ApplicationMetricsConstants.NAME_ENTITY_INFO, "test app"); + entityInfo.put(ApplicationMetricsConstants.TYPE_ENTITY_INFO, + "test app type"); + entityInfo.put(ApplicationMetricsConstants.USER_ENTITY_INFO, "user1"); + entityInfo.put(ApplicationMetricsConstants.QUEUE_ENTITY_INFO, "test queue"); + entityInfo.put(ApplicationMetricsConstants.SUBMITTED_TIME_ENTITY_INFO, + Integer.MAX_VALUE + 1L); + if (emptyACLs) { + entityInfo.put(ApplicationMetricsConstants.APP_VIEW_ACLS_ENTITY_INFO, ""); + } else { + entityInfo.put(ApplicationMetricsConstants.APP_VIEW_ACLS_ENTITY_INFO, + "user2"); + } + entity.setOtherInfo(entityInfo); + TimelineEvent tEvent = new TimelineEvent(); + tEvent.setEventType(ApplicationMetricsConstants.CREATED_EVENT_TYPE); + tEvent.setTimestamp(Integer.MAX_VALUE + 2L); + entity.addEvent(tEvent); + tEvent = new TimelineEvent(); + tEvent.setEventType( + ApplicationMetricsConstants.FINISHED_EVENT_TYPE); + tEvent.setTimestamp(Integer.MAX_VALUE + 3L); + Map eventInfo = new HashMap(); + eventInfo.put(ApplicationMetricsConstants.DIAGNOSTICS_INFO_EVENT_INFO, + "test diagnostics info"); + eventInfo.put(ApplicationMetricsConstants.FINAL_STATUS_EVENT_INFO, + FinalApplicationStatus.UNDEFINED.toString()); + eventInfo.put(ApplicationMetricsConstants.STATE_EVENT_INFO, + YarnApplicationState.FINISHED.toString()); + 
eventInfo.put(ApplicationMetricsConstants.LATEST_APP_ATTEMPT_EVENT_INFO, + ApplicationAttemptId.newInstance(appId, 1)); + tEvent.setEventInfo(eventInfo); + entity.addEvent(tEvent); + return entity; + } + + private static TimelineEntity createAppAttemptTimelineEntity( + ApplicationAttemptId appAttemptId) { + TimelineEntity entity = new TimelineEntity(); + entity.setEntityType(AppAttemptMetricsConstants.ENTITY_TYPE); + entity.setEntityId(appAttemptId.toString()); + entity.setDomainId(TimelineDataManager.DEFAULT_DOMAIN_ID); + entity.addPrimaryFilter(AppAttemptMetricsConstants.PARENT_PRIMARY_FILTER, + appAttemptId.getApplicationId().toString()); + entity.addPrimaryFilter( + TimelineStore.SystemFilter.ENTITY_OWNER.toString(), "yarn"); + TimelineEvent tEvent = new TimelineEvent(); + tEvent.setEventType(AppAttemptMetricsConstants.REGISTERED_EVENT_TYPE); + tEvent.setTimestamp(Integer.MAX_VALUE + 1L); + Map eventInfo = new HashMap(); + eventInfo.put(AppAttemptMetricsConstants.TRACKING_URL_EVENT_INFO, + "test tracking url"); + eventInfo.put(AppAttemptMetricsConstants.ORIGINAL_TRACKING_URL_EVENT_INFO, + "test original tracking url"); + eventInfo.put(AppAttemptMetricsConstants.HOST_EVENT_INFO, "test host"); + eventInfo.put(AppAttemptMetricsConstants.RPC_PORT_EVENT_INFO, -100); + eventInfo.put(AppAttemptMetricsConstants.MASTER_CONTAINER_EVENT_INFO, + ContainerId.newInstance(appAttemptId, 1)); + tEvent.setEventInfo(eventInfo); + entity.addEvent(tEvent); + tEvent = new TimelineEvent(); + tEvent.setEventType(AppAttemptMetricsConstants.FINISHED_EVENT_TYPE); + tEvent.setTimestamp(Integer.MAX_VALUE + 2L); + eventInfo = new HashMap(); + eventInfo.put(AppAttemptMetricsConstants.TRACKING_URL_EVENT_INFO, + "test tracking url"); + eventInfo.put(AppAttemptMetricsConstants.ORIGINAL_TRACKING_URL_EVENT_INFO, + "test original tracking url"); + eventInfo.put(AppAttemptMetricsConstants.DIAGNOSTICS_INFO_EVENT_INFO, + "test diagnostics info"); + eventInfo.put(AppAttemptMetricsConstants.FINAL_STATUS_EVENT_INFO, + FinalApplicationStatus.UNDEFINED.toString()); + eventInfo.put(AppAttemptMetricsConstants.STATE_EVENT_INFO, + YarnApplicationAttemptState.FINISHED.toString()); + tEvent.setEventInfo(eventInfo); + entity.addEvent(tEvent); + return entity; + } + + private static TimelineEntity createContainerEntity(ContainerId containerId) { + TimelineEntity entity = new TimelineEntity(); + entity.setEntityType(ContainerMetricsConstants.ENTITY_TYPE); + entity.setEntityId(containerId.toString()); + entity.setDomainId(TimelineDataManager.DEFAULT_DOMAIN_ID); + entity.addPrimaryFilter(ContainerMetricsConstants.PARENT_PRIMARIY_FILTER, + containerId.getApplicationAttemptId().toString()); + entity.addPrimaryFilter( + TimelineStore.SystemFilter.ENTITY_OWNER.toString(), "yarn"); + Map entityInfo = new HashMap(); + entityInfo.put(ContainerMetricsConstants.ALLOCATED_MEMORY_ENTITY_INFO, -1); + entityInfo.put(ContainerMetricsConstants.ALLOCATED_VCORE_ENTITY_INFO, -1); + entityInfo.put(ContainerMetricsConstants.ALLOCATED_HOST_ENTITY_INFO, + "test host"); + entityInfo.put(ContainerMetricsConstants.ALLOCATED_PORT_ENTITY_INFO, -100); + entityInfo + .put(ContainerMetricsConstants.ALLOCATED_PRIORITY_ENTITY_INFO, -1); + entity.setOtherInfo(entityInfo); + TimelineEvent tEvent = new TimelineEvent(); + tEvent.setEventType(ContainerMetricsConstants.CREATED_EVENT_TYPE); + tEvent.setTimestamp(Integer.MAX_VALUE + 1L); + entity.addEvent(tEvent); + ; + tEvent = new TimelineEvent(); + tEvent.setEventType(ContainerMetricsConstants.FINISHED_EVENT_TYPE); + 
tEvent.setTimestamp(Integer.MAX_VALUE + 2L); + Map eventInfo = new HashMap(); + eventInfo.put(ContainerMetricsConstants.DIAGNOSTICS_INFO_EVENT_INFO, + "test diagnostics info"); + eventInfo.put(ContainerMetricsConstants.EXIT_STATUS_EVENT_INFO, -1); + eventInfo.put(ContainerMetricsConstants.STATE_EVENT_INFO, + ContainerState.COMPLETE.toString()); + tEvent.setEventInfo(eventInfo); + entity.addEvent(tEvent); + return entity; } } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryManagerOnTimelineStore.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryManagerOnTimelineStore.java deleted file mode 100644 index f6c1481..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryManagerOnTimelineStore.java +++ /dev/null @@ -1,534 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.hadoop.yarn.server.applicationhistoryservice; - -import java.lang.reflect.UndeclaredThrowableException; -import java.security.PrivilegedExceptionAction; -import java.util.Arrays; -import java.util.Collection; -import java.util.HashMap; -import java.util.Map; - -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.security.SaslRpcServer.AuthMethod; -import org.apache.hadoop.security.UserGroupInformation; -import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; -import org.apache.hadoop.yarn.api.records.ApplicationAttemptReport; -import org.apache.hadoop.yarn.api.records.ApplicationId; -import org.apache.hadoop.yarn.api.records.ApplicationReport; -import org.apache.hadoop.yarn.api.records.ContainerId; -import org.apache.hadoop.yarn.api.records.ContainerReport; -import org.apache.hadoop.yarn.api.records.ContainerState; -import org.apache.hadoop.yarn.api.records.FinalApplicationStatus; -import org.apache.hadoop.yarn.api.records.NodeId; -import org.apache.hadoop.yarn.api.records.Priority; -import org.apache.hadoop.yarn.api.records.Resource; -import org.apache.hadoop.yarn.api.records.YarnApplicationAttemptState; -import org.apache.hadoop.yarn.api.records.YarnApplicationState; -import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities; -import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity; -import org.apache.hadoop.yarn.api.records.timeline.TimelineEvent; -import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.server.metrics.AppAttemptMetricsConstants; -import org.apache.hadoop.yarn.server.metrics.ApplicationMetricsConstants; -import org.apache.hadoop.yarn.server.metrics.ContainerMetricsConstants; -import org.apache.hadoop.yarn.server.security.ApplicationACLsManager; -import org.apache.hadoop.yarn.server.timeline.MemoryTimelineStore; -import org.apache.hadoop.yarn.server.timeline.TimelineDataManager; -import org.apache.hadoop.yarn.server.timeline.TimelineStore; -import org.apache.hadoop.yarn.server.timeline.security.TimelineACLsManager; -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; -import org.junit.runners.Parameterized.Parameters; - -@RunWith(Parameterized.class) -public class TestApplicationHistoryManagerOnTimelineStore { - - private static final int SCALE = 5; - private static TimelineStore store; - - private ApplicationHistoryManagerOnTimelineStore historyManager; - private UserGroupInformation callerUGI; - private Configuration conf; - - @BeforeClass - public static void prepareStore() throws Exception { - store = new MemoryTimelineStore(); - prepareTimelineStore(store); - } - - @Before - public void setup() throws Exception { - // Only test the ACLs of the generic history - TimelineACLsManager aclsManager = new TimelineACLsManager(new YarnConfiguration()); - TimelineDataManager dataManager = - new TimelineDataManager(store, aclsManager); - ApplicationACLsManager appAclsManager = new ApplicationACLsManager(conf); - historyManager = - new ApplicationHistoryManagerOnTimelineStore(dataManager, appAclsManager); - historyManager.init(conf); - historyManager.start(); - } - - @After - public void tearDown() { - if (historyManager != null) { - historyManager.stop(); - } - } - - @Parameters - public static Collection callers() { - // user1 is the owner - // user2 is the authorized user - // user3 is the unauthorized user - // admin is 
the admin acl - return Arrays.asList( - new Object[][] { { "" }, { "user1" }, { "user2" }, { "user3" }, { "admin" } }); - } - - public TestApplicationHistoryManagerOnTimelineStore(String caller) { - conf = new YarnConfiguration(); - if (!caller.equals("")) { - callerUGI = UserGroupInformation.createRemoteUser(caller, AuthMethod.SIMPLE); - conf.setBoolean(YarnConfiguration.YARN_ACL_ENABLE, true); - conf.set(YarnConfiguration.YARN_ADMIN_ACL, "admin"); - } - } - - private static void prepareTimelineStore(TimelineStore store) - throws Exception { - for (int i = 1; i <= SCALE; ++i) { - TimelineEntities entities = new TimelineEntities(); - ApplicationId appId = ApplicationId.newInstance(0, i); - if (i == 2) { - entities.addEntity(createApplicationTimelineEntity(appId, true)); - } else { - entities.addEntity(createApplicationTimelineEntity(appId, false)); - } - store.put(entities); - for (int j = 1; j <= SCALE; ++j) { - entities = new TimelineEntities(); - ApplicationAttemptId appAttemptId = - ApplicationAttemptId.newInstance(appId, j); - entities.addEntity(createAppAttemptTimelineEntity(appAttemptId)); - store.put(entities); - for (int k = 1; k <= SCALE; ++k) { - entities = new TimelineEntities(); - ContainerId containerId = ContainerId.newInstance(appAttemptId, k); - entities.addEntity(createContainerEntity(containerId)); - store.put(entities); - } - } - } - } - - @Test - public void testGetApplicationReport() throws Exception { - for (int i = 1; i <= 2; ++i) { - final ApplicationId appId = ApplicationId.newInstance(0, i); - ApplicationReport app; - if (callerUGI == null) { - app = historyManager.getApplication(appId); - } else { - app = - callerUGI.doAs(new PrivilegedExceptionAction () { - @Override - public ApplicationReport run() throws Exception { - return historyManager.getApplication(appId); - } - }); - } - Assert.assertNotNull(app); - Assert.assertEquals(appId, app.getApplicationId()); - Assert.assertEquals("test app", app.getName()); - Assert.assertEquals("test app type", app.getApplicationType()); - Assert.assertEquals("user1", app.getUser()); - Assert.assertEquals("test queue", app.getQueue()); - Assert.assertEquals(Integer.MAX_VALUE + 2L, app.getStartTime()); - Assert.assertEquals(Integer.MAX_VALUE + 3L, app.getFinishTime()); - Assert.assertTrue(Math.abs(app.getProgress() - 1.0F) < 0.0001); - // App 2 doesn't have the ACLs, such that the default ACLs " " will be used. - // Nobody except admin and owner has access to the details of the app. 
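Illustrative sketch (not part of the patch): the removed test wraps each query in UserGroupInformation.doAs(...) so the history manager's view-ACL checks run as different callers, and it treats an UndeclaredThrowableException wrapping the authorization error as the expected result for unauthorized users. A minimal standalone version of that pattern, with a placeholder body where the test calls historyManager.getApplication(appId):

import java.lang.reflect.UndeclaredThrowableException;
import java.security.PrivilegedExceptionAction;

import org.apache.hadoop.security.UserGroupInformation;

public class DoAsAclSketch {
  public static void main(String[] args) throws Exception {
    // Run a query as a remote user so that view-ACL checks apply to that user.
    UserGroupInformation user3 = UserGroupInformation.createRemoteUser("user3");
    try {
      String report = user3.doAs(new PrivilegedExceptionAction<String>() {
        @Override
        public String run() throws Exception {
          // Placeholder: in the removed test this is a privileged call such as
          // historyManager.getApplication(appId), executed as "user3".
          return "application report";
        }
      });
      System.out.println("allowed: " + report);
    } catch (UndeclaredThrowableException e) {
      // Checked exceptions thrown inside run() that doAs does not declare are
      // delivered wrapped; the test inspects e.getCause().getMessage() to confirm
      // the rejection is the expected authorization failure.
      System.out.println("rejected: " + e.getCause().getMessage());
    }
  }
}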
- if ((i == 1 && callerUGI != null && - callerUGI.getShortUserName().equals("user3")) || - (i == 2 && callerUGI != null && - (callerUGI.getShortUserName().equals("user2") || - callerUGI.getShortUserName().equals("user3")))) { - Assert.assertEquals(ApplicationAttemptId.newInstance(appId, -1), - app.getCurrentApplicationAttemptId()); - Assert.assertEquals(null, app.getHost()); - Assert.assertEquals(-1, app.getRpcPort()); - Assert.assertEquals(null, app.getTrackingUrl()); - Assert.assertEquals(null, app.getOriginalTrackingUrl()); - Assert.assertEquals(null, app.getDiagnostics()); - } else { - Assert.assertEquals(ApplicationAttemptId.newInstance(appId, 1), - app.getCurrentApplicationAttemptId()); - Assert.assertEquals("test host", app.getHost()); - Assert.assertEquals(-100, app.getRpcPort()); - Assert.assertEquals("test tracking url", app.getTrackingUrl()); - Assert.assertEquals("test original tracking url", - app.getOriginalTrackingUrl()); - Assert.assertEquals("test diagnostics info", app.getDiagnostics()); - } - Assert.assertEquals(FinalApplicationStatus.UNDEFINED, - app.getFinalApplicationStatus()); - Assert.assertEquals(YarnApplicationState.FINISHED, - app.getYarnApplicationState()); - } - } - - @Test - public void testGetApplicationAttemptReport() throws Exception { - final ApplicationAttemptId appAttemptId = - ApplicationAttemptId.newInstance(ApplicationId.newInstance(0, 1), 1); - ApplicationAttemptReport appAttempt; - if (callerUGI == null) { - appAttempt = historyManager.getApplicationAttempt(appAttemptId); - } else { - try { - appAttempt = - callerUGI.doAs(new PrivilegedExceptionAction () { - @Override - public ApplicationAttemptReport run() throws Exception { - return historyManager.getApplicationAttempt(appAttemptId); - } - }); - if (callerUGI != null && callerUGI.getShortUserName().equals("user3")) { - // The exception is expected - Assert.fail(); - } - } catch (UndeclaredThrowableException e) { - if (callerUGI != null && callerUGI.getShortUserName().equals("user3")) { - if (e.getCause().getMessage().contains( - "does not have privilage to see this application")) { - // The exception is expected - return; - } - } - throw e; - } - } - Assert.assertNotNull(appAttempt); - Assert.assertEquals(appAttemptId, appAttempt.getApplicationAttemptId()); - Assert.assertEquals(ContainerId.newInstance(appAttemptId, 1), - appAttempt.getAMContainerId()); - Assert.assertEquals("test host", appAttempt.getHost()); - Assert.assertEquals(-100, appAttempt.getRpcPort()); - Assert.assertEquals("test tracking url", appAttempt.getTrackingUrl()); - Assert.assertEquals("test original tracking url", - appAttempt.getOriginalTrackingUrl()); - Assert.assertEquals("test diagnostics info", appAttempt.getDiagnostics()); - Assert.assertEquals(YarnApplicationAttemptState.FINISHED, - appAttempt.getYarnApplicationAttemptState()); - } - - @Test - public void testGetContainerReport() throws Exception { - final ContainerId containerId = - ContainerId.newInstance(ApplicationAttemptId.newInstance( - ApplicationId.newInstance(0, 1), 1), 1); - ContainerReport container; - if (callerUGI == null) { - container = historyManager.getContainer(containerId); - } else { - try { - container = - callerUGI.doAs(new PrivilegedExceptionAction () { - @Override - public ContainerReport run() throws Exception { - return historyManager.getContainer(containerId); - } - }); - if (callerUGI != null && callerUGI.getShortUserName().equals("user3")) { - // The exception is expected - Assert.fail(); - } - } catch (UndeclaredThrowableException e) { - 
if (callerUGI != null && callerUGI.getShortUserName().equals("user3")) { - if (e.getCause().getMessage().contains( - "does not have privilage to see this application")) { - // The exception is expected - return; - } - } - throw e; - } - } - Assert.assertNotNull(container); - Assert.assertEquals(Integer.MAX_VALUE + 1L, container.getCreationTime()); - Assert.assertEquals(Integer.MAX_VALUE + 2L, container.getFinishTime()); - Assert.assertEquals(Resource.newInstance(-1, -1), - container.getAllocatedResource()); - Assert.assertEquals(NodeId.newInstance("test host", -100), - container.getAssignedNode()); - Assert.assertEquals(Priority.UNDEFINED, container.getPriority()); - Assert - .assertEquals("test diagnostics info", container.getDiagnosticsInfo()); - Assert.assertEquals(ContainerState.COMPLETE, container.getContainerState()); - Assert.assertEquals(-1, container.getContainerExitStatus()); - Assert.assertEquals("http://0.0.0.0:8188/applicationhistory/logs/" + - "test host:-100/container_0_0001_01_000001/" - + "container_0_0001_01_000001/user1", container.getLogUrl()); - } - - @Test - public void testGetApplications() throws Exception { - Collection apps = - historyManager.getAllApplications().values(); - Assert.assertNotNull(apps); - Assert.assertEquals(SCALE, apps.size()); - } - - @Test - public void testGetApplicationAttempts() throws Exception { - final ApplicationId appId = ApplicationId.newInstance(0, 1); - Collection appAttempts; - if (callerUGI == null) { - appAttempts = historyManager.getApplicationAttempts(appId).values(); - } else { - try { - appAttempts = callerUGI.doAs( - new PrivilegedExceptionAction> () { - @Override - public Collection run() throws Exception { - return historyManager.getApplicationAttempts(appId).values(); - } - }); - if (callerUGI != null && callerUGI.getShortUserName().equals("user3")) { - // The exception is expected - Assert.fail(); - } - } catch (UndeclaredThrowableException e) { - if (callerUGI != null && callerUGI.getShortUserName().equals("user3")) { - if (e.getCause().getMessage().contains( - "does not have privilage to see this application")) { - // The exception is expected - return; - } - } - throw e; - } - } - Assert.assertNotNull(appAttempts); - Assert.assertEquals(SCALE, appAttempts.size()); - } - - @Test - public void testGetContainers() throws Exception { - final ApplicationAttemptId appAttemptId = - ApplicationAttemptId.newInstance(ApplicationId.newInstance(0, 1), 1); - Collection containers; - if (callerUGI == null) { - containers = historyManager.getContainers(appAttemptId).values(); - } else { - try { - containers = callerUGI.doAs( - new PrivilegedExceptionAction> () { - @Override - public Collection run() throws Exception { - return historyManager.getContainers(appAttemptId).values(); - } - }); - if (callerUGI != null && callerUGI.getShortUserName().equals("user3")) { - // The exception is expected - Assert.fail(); - } - } catch (UndeclaredThrowableException e) { - if (callerUGI != null && callerUGI.getShortUserName().equals("user3")) { - if (e.getCause().getMessage().contains( - "does not have privilage to see this application")) { - // The exception is expected - return; - } - } - throw e; - } - } - Assert.assertNotNull(containers); - Assert.assertEquals(SCALE, containers.size()); - } - - @Test - public void testGetAMContainer() throws Exception { - final ApplicationAttemptId appAttemptId = - ApplicationAttemptId.newInstance(ApplicationId.newInstance(0, 1), 1); - ContainerReport container; - if (callerUGI == null) { - container = 
historyManager.getAMContainer(appAttemptId); - } else { - try { - container = - callerUGI.doAs(new PrivilegedExceptionAction () { - @Override - public ContainerReport run() throws Exception { - return historyManager.getAMContainer(appAttemptId); - } - }); - if (callerUGI != null && callerUGI.getShortUserName().equals("user3")) { - // The exception is expected - Assert.fail(); - } - } catch (UndeclaredThrowableException e) { - if (callerUGI != null && callerUGI.getShortUserName().equals("user3")) { - if (e.getCause().getMessage().contains( - "does not have privilage to see this application")) { - // The exception is expected - return; - } - } - throw e; - } - } - Assert.assertNotNull(container); - Assert.assertEquals(appAttemptId, container.getContainerId() - .getApplicationAttemptId()); - } - - private static TimelineEntity createApplicationTimelineEntity( - ApplicationId appId, boolean emptyACLs) { - TimelineEntity entity = new TimelineEntity(); - entity.setEntityType(ApplicationMetricsConstants.ENTITY_TYPE); - entity.setEntityId(appId.toString()); - entity.setDomainId(TimelineDataManager.DEFAULT_DOMAIN_ID); - entity.addPrimaryFilter( - TimelineStore.SystemFilter.ENTITY_OWNER.toString(), "yarn"); - Map entityInfo = new HashMap(); - entityInfo.put(ApplicationMetricsConstants.NAME_ENTITY_INFO, "test app"); - entityInfo.put(ApplicationMetricsConstants.TYPE_ENTITY_INFO, - "test app type"); - entityInfo.put(ApplicationMetricsConstants.USER_ENTITY_INFO, "user1"); - entityInfo.put(ApplicationMetricsConstants.QUEUE_ENTITY_INFO, "test queue"); - entityInfo.put(ApplicationMetricsConstants.SUBMITTED_TIME_ENTITY_INFO, - Integer.MAX_VALUE + 1L); - if (emptyACLs) { - entityInfo.put(ApplicationMetricsConstants.APP_VIEW_ACLS_ENTITY_INFO, ""); - } else { - entityInfo.put(ApplicationMetricsConstants.APP_VIEW_ACLS_ENTITY_INFO, - "user2"); - } - entity.setOtherInfo(entityInfo); - TimelineEvent tEvent = new TimelineEvent(); - tEvent.setEventType(ApplicationMetricsConstants.CREATED_EVENT_TYPE); - tEvent.setTimestamp(Integer.MAX_VALUE + 2L); - entity.addEvent(tEvent); - tEvent = new TimelineEvent(); - tEvent.setEventType( - ApplicationMetricsConstants.FINISHED_EVENT_TYPE); - tEvent.setTimestamp(Integer.MAX_VALUE + 3L); - Map eventInfo = new HashMap(); - eventInfo.put(ApplicationMetricsConstants.DIAGNOSTICS_INFO_EVENT_INFO, - "test diagnostics info"); - eventInfo.put(ApplicationMetricsConstants.FINAL_STATUS_EVENT_INFO, - FinalApplicationStatus.UNDEFINED.toString()); - eventInfo.put(ApplicationMetricsConstants.STATE_EVENT_INFO, - YarnApplicationState.FINISHED.toString()); - eventInfo.put(ApplicationMetricsConstants.LATEST_APP_ATTEMPT_EVENT_INFO, - ApplicationAttemptId.newInstance(appId, 1)); - tEvent.setEventInfo(eventInfo); - entity.addEvent(tEvent); - return entity; - } - - private static TimelineEntity createAppAttemptTimelineEntity( - ApplicationAttemptId appAttemptId) { - TimelineEntity entity = new TimelineEntity(); - entity.setEntityType(AppAttemptMetricsConstants.ENTITY_TYPE); - entity.setEntityId(appAttemptId.toString()); - entity.setDomainId(TimelineDataManager.DEFAULT_DOMAIN_ID); - entity.addPrimaryFilter(AppAttemptMetricsConstants.PARENT_PRIMARY_FILTER, - appAttemptId.getApplicationId().toString()); - entity.addPrimaryFilter( - TimelineStore.SystemFilter.ENTITY_OWNER.toString(), "yarn"); - TimelineEvent tEvent = new TimelineEvent(); - tEvent.setEventType(AppAttemptMetricsConstants.REGISTERED_EVENT_TYPE); - tEvent.setTimestamp(Integer.MAX_VALUE + 1L); - Map eventInfo = new HashMap(); - 
eventInfo.put(AppAttemptMetricsConstants.TRACKING_URL_EVENT_INFO, - "test tracking url"); - eventInfo.put(AppAttemptMetricsConstants.ORIGINAL_TRACKING_URL_EVENT_INFO, - "test original tracking url"); - eventInfo.put(AppAttemptMetricsConstants.HOST_EVENT_INFO, "test host"); - eventInfo.put(AppAttemptMetricsConstants.RPC_PORT_EVENT_INFO, -100); - eventInfo.put(AppAttemptMetricsConstants.MASTER_CONTAINER_EVENT_INFO, - ContainerId.newInstance(appAttemptId, 1)); - tEvent.setEventInfo(eventInfo); - entity.addEvent(tEvent); - tEvent = new TimelineEvent(); - tEvent.setEventType(AppAttemptMetricsConstants.FINISHED_EVENT_TYPE); - tEvent.setTimestamp(Integer.MAX_VALUE + 2L); - eventInfo = new HashMap(); - eventInfo.put(AppAttemptMetricsConstants.TRACKING_URL_EVENT_INFO, - "test tracking url"); - eventInfo.put(AppAttemptMetricsConstants.ORIGINAL_TRACKING_URL_EVENT_INFO, - "test original tracking url"); - eventInfo.put(AppAttemptMetricsConstants.DIAGNOSTICS_INFO_EVENT_INFO, - "test diagnostics info"); - eventInfo.put(AppAttemptMetricsConstants.FINAL_STATUS_EVENT_INFO, - FinalApplicationStatus.UNDEFINED.toString()); - eventInfo.put(AppAttemptMetricsConstants.STATE_EVENT_INFO, - YarnApplicationAttemptState.FINISHED.toString()); - tEvent.setEventInfo(eventInfo); - entity.addEvent(tEvent); - return entity; - } - - private static TimelineEntity createContainerEntity(ContainerId containerId) { - TimelineEntity entity = new TimelineEntity(); - entity.setEntityType(ContainerMetricsConstants.ENTITY_TYPE); - entity.setEntityId(containerId.toString()); - entity.setDomainId(TimelineDataManager.DEFAULT_DOMAIN_ID); - entity.addPrimaryFilter(ContainerMetricsConstants.PARENT_PRIMARIY_FILTER, - containerId.getApplicationAttemptId().toString()); - entity.addPrimaryFilter( - TimelineStore.SystemFilter.ENTITY_OWNER.toString(), "yarn"); - Map entityInfo = new HashMap(); - entityInfo.put(ContainerMetricsConstants.ALLOCATED_MEMORY_ENTITY_INFO, -1); - entityInfo.put(ContainerMetricsConstants.ALLOCATED_VCORE_ENTITY_INFO, -1); - entityInfo.put(ContainerMetricsConstants.ALLOCATED_HOST_ENTITY_INFO, - "test host"); - entityInfo.put(ContainerMetricsConstants.ALLOCATED_PORT_ENTITY_INFO, -100); - entityInfo - .put(ContainerMetricsConstants.ALLOCATED_PRIORITY_ENTITY_INFO, -1); - entity.setOtherInfo(entityInfo); - TimelineEvent tEvent = new TimelineEvent(); - tEvent.setEventType(ContainerMetricsConstants.CREATED_EVENT_TYPE); - tEvent.setTimestamp(Integer.MAX_VALUE + 1L); - entity.addEvent(tEvent); - ; - tEvent = new TimelineEvent(); - tEvent.setEventType(ContainerMetricsConstants.FINISHED_EVENT_TYPE); - tEvent.setTimestamp(Integer.MAX_VALUE + 2L); - Map eventInfo = new HashMap(); - eventInfo.put(ContainerMetricsConstants.DIAGNOSTICS_INFO_EVENT_INFO, - "test diagnostics info"); - eventInfo.put(ContainerMetricsConstants.EXIT_STATUS_EVENT_INFO, -1); - eventInfo.put(ContainerMetricsConstants.STATE_EVENT_INFO, - ContainerState.COMPLETE.toString()); - tEvent.setEventInfo(eventInfo); - entity.addEvent(tEvent); - return entity; - } -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestFileSystemApplicationHistoryStore.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestFileSystemApplicationHistoryStore.java deleted file mode 100644 index 4ac6f4d..0000000 --- 
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestFileSystemApplicationHistoryStore.java +++ /dev/null @@ -1,306 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.yarn.server.applicationhistoryservice; - -import java.io.IOException; -import java.net.URI; -import java.net.URISyntaxException; - -import org.junit.Assert; - -import static org.mockito.Mockito.any; -import static org.mockito.Mockito.doReturn; -import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.spy; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.fs.RawLocalFileSystem; -import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; -import org.apache.hadoop.yarn.api.records.ApplicationId; -import org.apache.hadoop.yarn.api.records.ContainerId; -import org.apache.hadoop.yarn.api.records.Priority; -import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptHistoryData; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationHistoryData; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerHistoryData; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -public class TestFileSystemApplicationHistoryStore extends - ApplicationHistoryStoreTestUtils { - - private static Log LOG = LogFactory - .getLog(TestFileSystemApplicationHistoryStore.class.getName()); - - private FileSystem fs; - private Path fsWorkingPath; - - @Before - public void setup() throws Exception { - fs = new RawLocalFileSystem(); - initAndStartStore(fs); - } - - private void initAndStartStore(final FileSystem fs) throws IOException, - URISyntaxException { - Configuration conf = new Configuration(); - fs.initialize(new URI("/"), conf); - fsWorkingPath = - new Path("target", - TestFileSystemApplicationHistoryStore.class.getSimpleName()); - fs.delete(fsWorkingPath, true); - conf.set(YarnConfiguration.FS_APPLICATION_HISTORY_STORE_URI, - fsWorkingPath.toString()); - store = new FileSystemApplicationHistoryStore() { - @Override - protected FileSystem getFileSystem(Path path, Configuration conf) { - return fs; - } - }; - store.init(conf); - store.start(); - } - - @After - public void tearDown() throws Exception { - store.stop(); - fs.delete(fsWorkingPath, true); - fs.close(); - } - - @Test - public void 
testReadWriteHistoryData() throws IOException { - LOG.info("Starting testReadWriteHistoryData"); - testWriteHistoryData(5); - testReadHistoryData(5); - } - - private void testWriteHistoryData(int num) throws IOException { - testWriteHistoryData(num, false, false); - } - - private void testWriteHistoryData( - int num, boolean missingContainer, boolean missingApplicationAttempt) - throws IOException { - // write application history data - for (int i = 1; i <= num; ++i) { - ApplicationId appId = ApplicationId.newInstance(0, i); - writeApplicationStartData(appId); - - // write application attempt history data - for (int j = 1; j <= num; ++j) { - ApplicationAttemptId appAttemptId = - ApplicationAttemptId.newInstance(appId, j); - writeApplicationAttemptStartData(appAttemptId); - - if (missingApplicationAttempt && j == num) { - continue; - } - // write container history data - for (int k = 1; k <= num; ++k) { - ContainerId containerId = ContainerId.newInstance(appAttemptId, k); - writeContainerStartData(containerId); - if (missingContainer && k == num) { - continue; - } - writeContainerFinishData(containerId); - } - writeApplicationAttemptFinishData(appAttemptId); - } - writeApplicationFinishData(appId); - } - } - - private void testReadHistoryData(int num) throws IOException { - testReadHistoryData(num, false, false); - } - - private void testReadHistoryData( - int num, boolean missingContainer, boolean missingApplicationAttempt) - throws IOException { - // read application history data - Assert.assertEquals(num, store.getAllApplications().size()); - for (int i = 1; i <= num; ++i) { - ApplicationId appId = ApplicationId.newInstance(0, i); - ApplicationHistoryData appData = store.getApplication(appId); - Assert.assertNotNull(appData); - Assert.assertEquals(appId.toString(), appData.getApplicationName()); - Assert.assertEquals(appId.toString(), appData.getDiagnosticsInfo()); - - // read application attempt history data - Assert.assertEquals(num, store.getApplicationAttempts(appId).size()); - for (int j = 1; j <= num; ++j) { - ApplicationAttemptId appAttemptId = - ApplicationAttemptId.newInstance(appId, j); - ApplicationAttemptHistoryData attemptData = - store.getApplicationAttempt(appAttemptId); - Assert.assertNotNull(attemptData); - Assert.assertEquals(appAttemptId.toString(), attemptData.getHost()); - - if (missingApplicationAttempt && j == num) { - Assert.assertNull(attemptData.getDiagnosticsInfo()); - continue; - } else { - Assert.assertEquals(appAttemptId.toString(), - attemptData.getDiagnosticsInfo()); - } - - // read container history data - Assert.assertEquals(num, store.getContainers(appAttemptId).size()); - for (int k = 1; k <= num; ++k) { - ContainerId containerId = ContainerId.newInstance(appAttemptId, k); - ContainerHistoryData containerData = store.getContainer(containerId); - Assert.assertNotNull(containerData); - Assert.assertEquals(Priority.newInstance(containerId.getId()), - containerData.getPriority()); - if (missingContainer && k == num) { - Assert.assertNull(containerData.getDiagnosticsInfo()); - } else { - Assert.assertEquals(containerId.toString(), - containerData.getDiagnosticsInfo()); - } - } - ContainerHistoryData masterContainer = - store.getAMContainer(appAttemptId); - Assert.assertNotNull(masterContainer); - Assert.assertEquals(ContainerId.newInstance(appAttemptId, 1), - masterContainer.getContainerId()); - } - } - } - - @Test - public void testWriteAfterApplicationFinish() throws IOException { - LOG.info("Starting testWriteAfterApplicationFinish"); - ApplicationId 
appId = ApplicationId.newInstance(0, 1); - writeApplicationStartData(appId); - writeApplicationFinishData(appId); - // write application attempt history data - ApplicationAttemptId appAttemptId = - ApplicationAttemptId.newInstance(appId, 1); - try { - writeApplicationAttemptStartData(appAttemptId); - Assert.fail(); - } catch (IOException e) { - Assert.assertTrue(e.getMessage().contains("is not opened")); - } - try { - writeApplicationAttemptFinishData(appAttemptId); - Assert.fail(); - } catch (IOException e) { - Assert.assertTrue(e.getMessage().contains("is not opened")); - } - // write container history data - ContainerId containerId = ContainerId.newInstance(appAttemptId, 1); - try { - writeContainerStartData(containerId); - Assert.fail(); - } catch (IOException e) { - Assert.assertTrue(e.getMessage().contains("is not opened")); - } - try { - writeContainerFinishData(containerId); - Assert.fail(); - } catch (IOException e) { - Assert.assertTrue(e.getMessage().contains("is not opened")); - } - } - - @Test - public void testMassiveWriteContainerHistoryData() throws IOException { - LOG.info("Starting testMassiveWriteContainerHistoryData"); - long mb = 1024 * 1024; - long usedDiskBefore = fs.getContentSummary(fsWorkingPath).getLength() / mb; - ApplicationId appId = ApplicationId.newInstance(0, 1); - writeApplicationStartData(appId); - ApplicationAttemptId appAttemptId = - ApplicationAttemptId.newInstance(appId, 1); - for (int i = 1; i <= 100000; ++i) { - ContainerId containerId = ContainerId.newInstance(appAttemptId, i); - writeContainerStartData(containerId); - writeContainerFinishData(containerId); - } - writeApplicationFinishData(appId); - long usedDiskAfter = fs.getContentSummary(fsWorkingPath).getLength() / mb; - Assert.assertTrue((usedDiskAfter - usedDiskBefore) < 20); - } - - @Test - public void testMissingContainerHistoryData() throws IOException { - LOG.info("Starting testMissingContainerHistoryData"); - testWriteHistoryData(3, true, false); - testReadHistoryData(3, true, false); - } - - @Test - public void testMissingApplicationAttemptHistoryData() throws IOException { - LOG.info("Starting testMissingApplicationAttemptHistoryData"); - testWriteHistoryData(3, false, true); - testReadHistoryData(3, false, true); - } - - @Test - public void testInitExistingWorkingDirectoryInSafeMode() throws Exception { - LOG.info("Starting testInitExistingWorkingDirectoryInSafeMode"); - tearDown(); - - // Setup file system to inject startup conditions - FileSystem fs = spy(new RawLocalFileSystem()); - doReturn(true).when(fs).isDirectory(any(Path.class)); - - try { - initAndStartStore(fs); - } catch (Exception e) { - Assert.fail("Exception should not be thrown: " + e); - } - - // Make sure that directory creation was not attempted - verify(fs, times(1)).isDirectory(any(Path.class)); - verify(fs, times(0)).mkdirs(any(Path.class)); - } - - @Test - public void testInitNonExistingWorkingDirectoryInSafeMode() throws Exception { - LOG.info("Starting testInitNonExistingWorkingDirectoryInSafeMode"); - tearDown(); - - // Setup file system to inject startup conditions - FileSystem fs = spy(new RawLocalFileSystem()); - doReturn(false).when(fs).isDirectory(any(Path.class)); - doThrow(new IOException()).when(fs).mkdirs(any(Path.class)); - - try { - initAndStartStore(fs); - Assert.fail("Exception should have been thrown"); - } catch (Exception e) { - // Expected failure - } - - // Make sure that directory creation was attempted - verify(fs, times(1)).isDirectory(any(Path.class)); - verify(fs, 
times(1)).mkdirs(any(Path.class)); - } -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestMemoryApplicationHistoryStore.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestMemoryApplicationHistoryStore.java deleted file mode 100644 index de561aa..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestMemoryApplicationHistoryStore.java +++ /dev/null @@ -1,204 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.yarn.server.applicationhistoryservice; - -import java.io.IOException; - -import org.junit.Assert; - -import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; -import org.apache.hadoop.yarn.api.records.ApplicationId; -import org.apache.hadoop.yarn.api.records.ContainerId; -import org.apache.hadoop.yarn.api.records.Priority; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptHistoryData; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationHistoryData; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerHistoryData; -import org.junit.Before; -import org.junit.Test; - -public class TestMemoryApplicationHistoryStore extends - ApplicationHistoryStoreTestUtils { - - @Before - public void setup() { - store = new MemoryApplicationHistoryStore(); - } - - @Test - public void testReadWriteApplicationHistory() throws Exception { - // Out of order - ApplicationId appId = ApplicationId.newInstance(0, 1); - try { - writeApplicationFinishData(appId); - Assert.fail(); - } catch (IOException e) { - Assert.assertTrue(e.getMessage().contains( - "is stored before the start information")); - } - // Normal - int numApps = 5; - for (int i = 1; i <= numApps; ++i) { - appId = ApplicationId.newInstance(0, i); - writeApplicationStartData(appId); - writeApplicationFinishData(appId); - } - Assert.assertEquals(numApps, store.getAllApplications().size()); - for (int i = 1; i <= numApps; ++i) { - appId = ApplicationId.newInstance(0, i); - ApplicationHistoryData data = store.getApplication(appId); - Assert.assertNotNull(data); - Assert.assertEquals(appId.toString(), data.getApplicationName()); - Assert.assertEquals(appId.toString(), data.getDiagnosticsInfo()); - } - // Write again - appId = ApplicationId.newInstance(0, 1); - try { - writeApplicationStartData(appId); - Assert.fail(); - } catch (IOException e) { - 
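Illustrative sketch (not part of the patch): the safe-mode tests deleted just above stub a Mockito spy of RawLocalFileSystem so the store's startup path sees either a pre-existing working directory or one that cannot be created. The stubbing and verification pattern, shown standalone and assuming only Mockito and hadoop-common on the classpath:

import static org.mockito.Mockito.any;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;

import java.io.IOException;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RawLocalFileSystem;

public class SpyFileSystemSketch {
  public static void main(String[] args) throws Exception {
    // Spy a real local file system and stub only the calls we want to control.
    RawLocalFileSystem fs = spy(new RawLocalFileSystem());
    doReturn(false).when(fs).isDirectory(any(Path.class));
    doThrow(new IOException("simulated failure")).when(fs).mkdirs(any(Path.class));

    // Code under test would now see an IOException when creating its working
    // directory; here the stubs are triggered directly for demonstration.
    boolean exists = fs.isDirectory(new Path("/tmp/ahs"));
    try {
      fs.mkdirs(new Path("/tmp/ahs"));
    } catch (IOException expected) {
      // expected: mkdirs was stubbed to fail
    }

    // Verify the interaction counts, as the deleted tests do.
    verify(fs, times(1)).isDirectory(any(Path.class));
    verify(fs, times(1)).mkdirs(any(Path.class));
    System.out.println("directory existed: " + exists);
  }
}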
Assert.assertTrue(e.getMessage().contains("is already stored")); - } - try { - writeApplicationFinishData(appId); - Assert.fail(); - } catch (IOException e) { - Assert.assertTrue(e.getMessage().contains("is already stored")); - } - } - - @Test - public void testReadWriteApplicationAttemptHistory() throws Exception { - // Out of order - ApplicationId appId = ApplicationId.newInstance(0, 1); - ApplicationAttemptId appAttemptId = - ApplicationAttemptId.newInstance(appId, 1); - try { - writeApplicationAttemptFinishData(appAttemptId); - Assert.fail(); - } catch (IOException e) { - Assert.assertTrue(e.getMessage().contains( - "is stored before the start information")); - } - // Normal - int numAppAttempts = 5; - writeApplicationStartData(appId); - for (int i = 1; i <= numAppAttempts; ++i) { - appAttemptId = ApplicationAttemptId.newInstance(appId, i); - writeApplicationAttemptStartData(appAttemptId); - writeApplicationAttemptFinishData(appAttemptId); - } - Assert.assertEquals(numAppAttempts, store.getApplicationAttempts(appId) - .size()); - for (int i = 1; i <= numAppAttempts; ++i) { - appAttemptId = ApplicationAttemptId.newInstance(appId, i); - ApplicationAttemptHistoryData data = - store.getApplicationAttempt(appAttemptId); - Assert.assertNotNull(data); - Assert.assertEquals(appAttemptId.toString(), data.getHost()); - Assert.assertEquals(appAttemptId.toString(), data.getDiagnosticsInfo()); - } - writeApplicationFinishData(appId); - // Write again - appAttemptId = ApplicationAttemptId.newInstance(appId, 1); - try { - writeApplicationAttemptStartData(appAttemptId); - Assert.fail(); - } catch (IOException e) { - Assert.assertTrue(e.getMessage().contains("is already stored")); - } - try { - writeApplicationAttemptFinishData(appAttemptId); - Assert.fail(); - } catch (IOException e) { - Assert.assertTrue(e.getMessage().contains("is already stored")); - } - } - - @Test - public void testReadWriteContainerHistory() throws Exception { - // Out of order - ApplicationId appId = ApplicationId.newInstance(0, 1); - ApplicationAttemptId appAttemptId = - ApplicationAttemptId.newInstance(appId, 1); - ContainerId containerId = ContainerId.newInstance(appAttemptId, 1); - try { - writeContainerFinishData(containerId); - Assert.fail(); - } catch (IOException e) { - Assert.assertTrue(e.getMessage().contains( - "is stored before the start information")); - } - // Normal - writeApplicationAttemptStartData(appAttemptId); - int numContainers = 5; - for (int i = 1; i <= numContainers; ++i) { - containerId = ContainerId.newInstance(appAttemptId, i); - writeContainerStartData(containerId); - writeContainerFinishData(containerId); - } - Assert - .assertEquals(numContainers, store.getContainers(appAttemptId).size()); - for (int i = 1; i <= numContainers; ++i) { - containerId = ContainerId.newInstance(appAttemptId, i); - ContainerHistoryData data = store.getContainer(containerId); - Assert.assertNotNull(data); - Assert.assertEquals(Priority.newInstance(containerId.getId()), - data.getPriority()); - Assert.assertEquals(containerId.toString(), data.getDiagnosticsInfo()); - } - ContainerHistoryData masterContainer = store.getAMContainer(appAttemptId); - Assert.assertNotNull(masterContainer); - Assert.assertEquals(ContainerId.newInstance(appAttemptId, 1), - masterContainer.getContainerId()); - writeApplicationAttemptFinishData(appAttemptId); - // Write again - containerId = ContainerId.newInstance(appAttemptId, 1); - try { - writeContainerStartData(containerId); - Assert.fail(); - } catch (IOException e) { - 
Assert.assertTrue(e.getMessage().contains("is already stored")); - } - try { - writeContainerFinishData(containerId); - Assert.fail(); - } catch (IOException e) { - Assert.assertTrue(e.getMessage().contains("is already stored")); - } - } - - @Test - public void testMassiveWriteContainerHistory() throws IOException { - long mb = 1024 * 1024; - Runtime runtime = Runtime.getRuntime(); - long usedMemoryBefore = (runtime.totalMemory() - runtime.freeMemory()) / mb; - int numContainers = 100000; - ApplicationId appId = ApplicationId.newInstance(0, 1); - ApplicationAttemptId appAttemptId = - ApplicationAttemptId.newInstance(appId, 1); - for (int i = 1; i <= numContainers; ++i) { - ContainerId containerId = ContainerId.newInstance(appAttemptId, i); - writeContainerStartData(containerId); - writeContainerFinishData(containerId); - } - long usedMemoryAfter = (runtime.totalMemory() - runtime.freeMemory()) / mb; - Assert.assertTrue((usedMemoryAfter - usedMemoryBefore) < 400); - } - -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestAHSWebApp.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestAHSWebApp.java index 82c4276..7ec6b6b 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestAHSWebApp.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestAHSWebApp.java @@ -20,7 +20,6 @@ import static org.apache.hadoop.yarn.webapp.Params.TITLE; import static org.mockito.Mockito.mock; -import org.junit.Assert; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; @@ -31,26 +30,35 @@ import org.apache.hadoop.yarn.server.api.ApplicationContext; import org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryManager; import org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryManagerImpl; -import org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryStore; -import org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryStoreTestUtils; -import org.apache.hadoop.yarn.server.applicationhistoryservice.MemoryApplicationHistoryStore; +import org.apache.hadoop.yarn.server.applicationhistoryservice.TestApplicationHistoryManagerImpl; +import org.apache.hadoop.yarn.server.security.ApplicationACLsManager; +import org.apache.hadoop.yarn.server.timeline.TimelineDataManager; +import org.apache.hadoop.yarn.server.timeline.TimelineStore; +import org.apache.hadoop.yarn.server.timeline.security.TimelineACLsManager; import org.apache.hadoop.yarn.util.StringHelper; import org.apache.hadoop.yarn.webapp.YarnWebParams; import org.apache.hadoop.yarn.webapp.test.WebAppTests; -import org.junit.Before; +import org.junit.Assert; +import org.junit.BeforeClass; import org.junit.Test; import com.google.inject.Injector; -public class TestAHSWebApp extends ApplicationHistoryStoreTestUtils { - - public void setApplicationHistoryStore(ApplicationHistoryStore store) { - this.store = store; - } - - @Before - public void setup() { - store = new MemoryApplicationHistoryStore(); +public class 
TestAHSWebApp { + + private static ApplicationHistoryManagerImpl historyManager; + + @BeforeClass + public static void setup() throws Exception { + Configuration conf = new YarnConfiguration(); + TimelineStore store = TestApplicationHistoryManagerImpl.createStore(5); + TimelineACLsManager aclsManager = new TimelineACLsManager(conf); + TimelineDataManager dataManager = + new TimelineDataManager(store, aclsManager); + ApplicationACLsManager appAclsManager = new ApplicationACLsManager(conf); + historyManager = new ApplicationHistoryManagerImpl(dataManager, appAclsManager); + historyManager.init(conf); + historyManager.start(); } @Test @@ -67,9 +75,8 @@ public void testAppControllerIndex() throws Exception { @Test public void testView() throws Exception { - Injector injector = - WebAppTests.createMockInjector(ApplicationContext.class, - mockApplicationHistoryManager(5, 1, 1)); + Injector injector = WebAppTests.createMockInjector( + ApplicationContext.class, historyManager); AHSView ahsViewInstance = injector.getInstance(AHSView.class); ahsViewInstance.render(); @@ -88,9 +95,8 @@ public void testView() throws Exception { @Test public void testAppPage() throws Exception { - Injector injector = - WebAppTests.createMockInjector(ApplicationContext.class, - mockApplicationHistoryManager(1, 5, 1)); + Injector injector = WebAppTests.createMockInjector( + ApplicationContext.class, historyManager); AppPage appPageInstance = injector.getInstance(AppPage.class); appPageInstance.render(); @@ -104,9 +110,8 @@ public void testAppPage() throws Exception { @Test public void testAppAttemptPage() throws Exception { - Injector injector = - WebAppTests.createMockInjector(ApplicationContext.class, - mockApplicationHistoryManager(1, 1, 5)); + Injector injector = WebAppTests.createMockInjector( + ApplicationContext.class, historyManager); AppAttemptPage appAttemptPageInstance = injector.getInstance(AppAttemptPage.class); @@ -122,9 +127,8 @@ public void testAppAttemptPage() throws Exception { @Test public void testContainerPage() throws Exception { - Injector injector = - WebAppTests.createMockInjector(ApplicationContext.class, - mockApplicationHistoryManager(1, 1, 1)); + Injector injector = WebAppTests.createMockInjector( + ApplicationContext.class, historyManager); ContainerPage containerPageInstance = injector.getInstance(ContainerPage.class); @@ -141,42 +145,4 @@ public void testContainerPage() throws Exception { WebAppTests.flushOutput(injector); } - ApplicationHistoryManager mockApplicationHistoryManager(int numApps, - int numAppAttempts, int numContainers) throws Exception { - ApplicationHistoryManager ahManager = - new MockApplicationHistoryManagerImpl(store); - for (int i = 1; i <= numApps; ++i) { - ApplicationId appId = ApplicationId.newInstance(0, i); - writeApplicationStartData(appId); - for (int j = 1; j <= numAppAttempts; ++j) { - ApplicationAttemptId appAttemptId = - ApplicationAttemptId.newInstance(appId, j); - writeApplicationAttemptStartData(appAttemptId); - for (int k = 1; k <= numContainers; ++k) { - ContainerId containerId = ContainerId.newInstance(appAttemptId, k); - writeContainerStartData(containerId); - writeContainerFinishData(containerId); - } - writeApplicationAttemptFinishData(appAttemptId); - } - writeApplicationFinishData(appId); - } - return ahManager; - } - - class MockApplicationHistoryManagerImpl extends ApplicationHistoryManagerImpl { - - public MockApplicationHistoryManagerImpl(ApplicationHistoryStore store) { - super(); - init(new YarnConfiguration()); - start(); - } - - @Override 
- protected ApplicationHistoryStore createApplicationHistoryStore( - Configuration conf) { - return store; - } - }; - } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestAHSWebServices.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestAHSWebServices.java index b348443..c10bb2b 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestAHSWebServices.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestAHSWebServices.java @@ -23,8 +23,6 @@ import javax.ws.rs.core.MediaType; -import org.junit.Assert; - import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ApplicationId; @@ -32,14 +30,16 @@ import org.apache.hadoop.yarn.api.records.ContainerState; import org.apache.hadoop.yarn.api.records.FinalApplicationStatus; import org.apache.hadoop.yarn.api.records.NodeId; -import org.apache.hadoop.yarn.api.records.Priority; import org.apache.hadoop.yarn.api.records.YarnApplicationAttemptState; import org.apache.hadoop.yarn.api.records.YarnApplicationState; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.server.api.ApplicationContext; -import org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryManager; -import org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryStore; -import org.apache.hadoop.yarn.server.applicationhistoryservice.MemoryApplicationHistoryStore; +import org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryManagerImpl; +import org.apache.hadoop.yarn.server.applicationhistoryservice.TestApplicationHistoryManagerImpl; +import org.apache.hadoop.yarn.server.security.ApplicationACLsManager; +import org.apache.hadoop.yarn.server.timeline.TimelineDataManager; +import org.apache.hadoop.yarn.server.timeline.TimelineStore; +import org.apache.hadoop.yarn.server.timeline.security.TimelineACLsManager; import org.apache.hadoop.yarn.webapp.GenericExceptionHandler; import org.apache.hadoop.yarn.webapp.WebServicesTestUtils; import org.apache.hadoop.yarn.webapp.util.WebAppUtils; @@ -47,6 +47,7 @@ import org.codehaus.jettison.json.JSONException; import org.codehaus.jettison.json.JSONObject; import org.junit.Before; +import org.junit.BeforeClass; import org.junit.Test; import com.google.inject.Guice; @@ -63,7 +64,21 @@ public class TestAHSWebServices extends JerseyTest { - private static ApplicationHistoryManager ahManager; + private static ApplicationHistoryManagerImpl historyManager; + + @BeforeClass + public static void setup() throws Exception { + Configuration conf = new YarnConfiguration(); + TimelineStore store = TestApplicationHistoryManagerImpl.createStore(5); + TimelineACLsManager aclsManager = new TimelineACLsManager(conf); + TimelineDataManager dataManager = + new TimelineDataManager(store, aclsManager); + ApplicationACLsManager appAclsManager = new ApplicationACLsManager(conf); + historyManager = new ApplicationHistoryManagerImpl(dataManager, appAclsManager); + 
historyManager.init(conf); + historyManager.start(); + } + private Injector injector = Guice.createInjector(new ServletModule() { @@ -72,12 +87,7 @@ protected void configureServlets() { bind(JAXBContextResolver.class); bind(AHSWebServices.class); bind(GenericExceptionHandler.class); - try { - ahManager = mockApplicationHistoryManager(); - } catch (Exception e) { - Assert.fail(); - } - bind(ApplicationContext.class).toInstance(ahManager); + bind(ApplicationContext.class).toInstance(historyManager); serve("/*").with(GuiceContainer.class); } }); @@ -90,16 +100,6 @@ protected Injector getInjector() { } } - private ApplicationHistoryManager mockApplicationHistoryManager() - throws Exception { - ApplicationHistoryStore store = new MemoryApplicationHistoryStore(); - TestAHSWebApp testAHSWebApp = new TestAHSWebApp(); - testAHSWebApp.setApplicationHistoryStore(store); - ApplicationHistoryManager ahManager = - testAHSWebApp.mockApplicationHistoryManager(5, 5, 5); - return ahManager; - } - public TestAHSWebServices() { super(new WebAppDescriptor.Builder( "org.apache.hadoop.yarn.server.applicationhistoryservice.webapp") @@ -194,11 +194,11 @@ public void testSingleApp() throws Exception { assertEquals("incorrect number of elements", 1, json.length()); JSONObject app = json.getJSONObject("app"); assertEquals(appId.toString(), app.getString("appId")); - assertEquals(appId.toString(), app.get("name")); - assertEquals(appId.toString(), app.get("diagnosticsInfo")); + assertEquals("test app", app.get("name")); + assertEquals("test diagnostics info", app.get("diagnosticsInfo")); assertEquals("test queue", app.get("queue")); - assertEquals("test user", app.get("user")); - assertEquals("test type", app.get("type")); + assertEquals("user1", app.get("user")); + assertEquals("test app type", app.get("type")); assertEquals(FinalApplicationStatus.UNDEFINED.toString(), app.get("finalAppStatus")); assertEquals(YarnApplicationState.FINISHED.toString(), app.get("appState")); @@ -237,8 +237,8 @@ public void testSingleAttempt() throws Exception { assertEquals("incorrect number of elements", 1, json.length()); JSONObject appAttempt = json.getJSONObject("appAttempt"); assertEquals(appAttemptId.toString(), appAttempt.getString("appAttemptId")); - assertEquals(appAttemptId.toString(), appAttempt.getString("host")); - assertEquals(appAttemptId.toString(), + assertEquals("test host", appAttempt.getString("host")); + assertEquals("test diagnostics info", appAttempt.getString("diagnosticsInfo")); assertEquals("test tracking url", appAttempt.getString("trackingUrl")); assertEquals(YarnApplicationAttemptState.FINISHED.toString(), @@ -283,19 +283,18 @@ public void testSingleContainer() throws Exception { assertEquals("incorrect number of elements", 1, json.length()); JSONObject container = json.getJSONObject("container"); assertEquals(containerId.toString(), container.getString("containerId")); - assertEquals(containerId.toString(), container.getString("diagnosticsInfo")); - assertEquals("0", container.getString("allocatedMB")); - assertEquals("0", container.getString("allocatedVCores")); - assertEquals(NodeId.newInstance("localhost", 0).toString(), + assertEquals("test diagnostics info", + container.getString("diagnosticsInfo")); + assertEquals("-1", container.getString("allocatedMB")); + assertEquals("-1", container.getString("allocatedVCores")); + assertEquals(NodeId.newInstance("test host", -100).toString(), container.getString("assignedNodeId")); - assertEquals(Priority.newInstance(containerId.getId()).toString(), - 
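Illustrative sketch (not part of the patch): the added hunks in these two test classes build the same stack, a history manager layered on a timeline data manager, which in turn sits on a pre-populated timeline store. Reformatted, the wiring they add looks roughly like this; it compiles only in the test scope where TestApplicationHistoryManagerImpl and its createStore(5) helper from this patch are visible, and the wrapper class is invented for illustration.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryManagerImpl;
import org.apache.hadoop.yarn.server.applicationhistoryservice.TestApplicationHistoryManagerImpl;
import org.apache.hadoop.yarn.server.security.ApplicationACLsManager;
import org.apache.hadoop.yarn.server.timeline.TimelineDataManager;
import org.apache.hadoop.yarn.server.timeline.TimelineStore;
import org.apache.hadoop.yarn.server.timeline.security.TimelineACLsManager;

public class HistoryManagerWiringSketch {
  static ApplicationHistoryManagerImpl newHistoryManager() throws Exception {
    Configuration conf = new YarnConfiguration();
    // A timeline store pre-populated with sample apps, attempts and containers.
    TimelineStore store = TestApplicationHistoryManagerImpl.createStore(5);
    // Layer the data manager (with timeline ACLs) over the store ...
    TimelineDataManager dataManager =
        new TimelineDataManager(store, new TimelineACLsManager(conf));
    // ... and the history manager (with application ACLs) over the data manager.
    ApplicationHistoryManagerImpl historyManager =
        new ApplicationHistoryManagerImpl(dataManager, new ApplicationACLsManager(conf));
    historyManager.init(conf);
    historyManager.start();
    return historyManager;
  }
}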
container.getString("priority")); + assertEquals("-1", container.getString("priority")); Configuration conf = new YarnConfiguration(); assertEquals(WebAppUtils.getHttpSchemePrefix(conf) + WebAppUtils.getAHSWebAppURLWithoutScheme(conf) + - "/applicationhistory/logs/localhost:0/container_0_0001_01_000001/" + - "container_0_0001_01_000001/test user", - container.getString("logUrl")); + "/applicationhistory/logs/test host:-100/container_0_0001_01_000001/" + + "container_0_0001_01_000001/user1", container.getString("logUrl")); assertEquals(ContainerState.COMPLETE.toString(), container.getString("containerState")); } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMContext.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMContext.java index a59965f..5144d5a 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMContext.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMContext.java @@ -25,7 +25,6 @@ import org.apache.hadoop.yarn.api.records.NodeId; import org.apache.hadoop.yarn.conf.ConfigurationProvider; import org.apache.hadoop.yarn.event.Dispatcher; -import org.apache.hadoop.yarn.server.resourcemanager.ahs.RMApplicationHistoryWriter; import org.apache.hadoop.yarn.server.resourcemanager.metrics.SystemMetricsPublisher; import org.apache.hadoop.yarn.server.resourcemanager.recovery.RMStateStore; import org.apache.hadoop.yarn.server.resourcemanager.reservation.ReservationSystem; @@ -95,11 +94,6 @@ void setRMDelegationTokenSecretManager( RMDelegationTokenSecretManager delegationTokenSecretManager); - RMApplicationHistoryWriter getRMApplicationHistoryWriter(); - - void setRMApplicationHistoryWriter( - RMApplicationHistoryWriter rmApplicationHistoryWriter); - void setSystemMetricsPublisher(SystemMetricsPublisher systemMetricsPublisher); SystemMetricsPublisher getSystemMetricsPublisher(); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMContextImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMContextImpl.java index 78787ee..a123dcd 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMContextImpl.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMContextImpl.java @@ -32,7 +32,6 @@ import org.apache.hadoop.yarn.conf.ConfigurationProvider; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.event.Dispatcher; -import org.apache.hadoop.yarn.server.resourcemanager.ahs.RMApplicationHistoryWriter; import org.apache.hadoop.yarn.server.resourcemanager.metrics.SystemMetricsPublisher; import org.apache.hadoop.yarn.server.resourcemanager.recovery.NullRMStateStore; import org.apache.hadoop.yarn.server.resourcemanager.recovery.RMStateStore; @@ -88,7 +87,6 @@ private NodesListManager nodesListManager; private ResourceTrackerService 
resourceTrackerService; private ApplicationMasterService applicationMasterService; - private RMApplicationHistoryWriter rmApplicationHistoryWriter; private SystemMetricsPublisher systemMetricsPublisher; private ConfigurationProvider configurationProvider; private long epoch; @@ -118,8 +116,7 @@ public RMContextImpl(Dispatcher rmDispatcher, AMRMTokenSecretManager appTokenSecretManager, RMContainerTokenSecretManager containerTokenSecretManager, NMTokenSecretManagerInRM nmTokenSecretManager, - ClientToAMTokenSecretManagerInRM clientToAMTokenSecretManager, - RMApplicationHistoryWriter rmApplicationHistoryWriter) { + ClientToAMTokenSecretManagerInRM clientToAMTokenSecretManager) { this(); this.setDispatcher(rmDispatcher); this.setContainerAllocationExpirer(containerAllocationExpirer); @@ -130,7 +127,6 @@ public RMContextImpl(Dispatcher rmDispatcher, this.setContainerTokenSecretManager(containerTokenSecretManager); this.setNMTokenSecretManager(nmTokenSecretManager); this.setClientToAMTokenSecretManager(clientToAMTokenSecretManager); - this.setRMApplicationHistoryWriter(rmApplicationHistoryWriter); RMStateStore nullStore = new NullRMStateStore(); nullStore.setRMDispatcher(rmDispatcher); @@ -366,11 +362,6 @@ public boolean isWorkPreservingRecoveryEnabled() { } @Override - public RMApplicationHistoryWriter getRMApplicationHistoryWriter() { - return rmApplicationHistoryWriter; - } - - @Override public void setSystemMetricsPublisher( SystemMetricsPublisher systemMetricsPublisher) { this.systemMetricsPublisher = systemMetricsPublisher; @@ -382,12 +373,6 @@ public SystemMetricsPublisher getSystemMetricsPublisher() { } @Override - public void setRMApplicationHistoryWriter( - RMApplicationHistoryWriter rmApplicationHistoryWriter) { - this.rmApplicationHistoryWriter = rmApplicationHistoryWriter; - } - - @Override public ConfigurationProvider getConfigurationProvider() { return this.configurationProvider; } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceManager.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceManager.java index 3e5f138..6a7d0ce 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceManager.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceManager.java @@ -61,7 +61,6 @@ import org.apache.hadoop.yarn.event.Dispatcher; import org.apache.hadoop.yarn.event.EventHandler; import org.apache.hadoop.yarn.exceptions.YarnRuntimeException; -import org.apache.hadoop.yarn.server.resourcemanager.ahs.RMApplicationHistoryWriter; import org.apache.hadoop.yarn.server.resourcemanager.amlauncher.AMLauncherEventType; import org.apache.hadoop.yarn.server.resourcemanager.amlauncher.ApplicationMasterLauncher; import org.apache.hadoop.yarn.server.resourcemanager.metrics.SystemMetricsPublisher; @@ -329,10 +328,6 @@ protected RMAppManager createRMAppManager() { this.applicationACLsManager, this.conf); } - protected RMApplicationHistoryWriter createRMApplicationHistoryWriter() { - return new RMApplicationHistoryWriter(); - } - protected SystemMetricsPublisher createSystemMetricsPublisher() { return new SystemMetricsPublisher(); } @@ -435,11 +430,6 @@ protected void 
serviceInit(Configuration configuration) throws Exception { rmContext.setDelegationTokenRenewer(delegationTokenRenewer); } - RMApplicationHistoryWriter rmApplicationHistoryWriter = - createRMApplicationHistoryWriter(); - addService(rmApplicationHistoryWriter); - rmContext.setRMApplicationHistoryWriter(rmApplicationHistoryWriter); - SystemMetricsPublisher systemMetricsPublisher = createSystemMetricsPublisher(); addService(systemMetricsPublisher); rmContext.setSystemMetricsPublisher(systemMetricsPublisher); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ahs/RMApplicationHistoryWriter.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ahs/RMApplicationHistoryWriter.java deleted file mode 100644 index bd328ab..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ahs/RMApplicationHistoryWriter.java +++ /dev/null @@ -1,363 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.hadoop.yarn.server.resourcemanager.ahs; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.classification.InterfaceAudience.Private; -import org.apache.hadoop.classification.InterfaceStability.Unstable; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.service.CompositeService; -import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.event.AsyncDispatcher; -import org.apache.hadoop.yarn.event.Dispatcher; -import org.apache.hadoop.yarn.event.Event; -import org.apache.hadoop.yarn.event.EventHandler; -import org.apache.hadoop.yarn.exceptions.YarnRuntimeException; -import org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryStore; -import org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryWriter; -import org.apache.hadoop.yarn.server.applicationhistoryservice.FileSystemApplicationHistoryStore; -import org.apache.hadoop.yarn.server.applicationhistoryservice.NullApplicationHistoryStore; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptFinishData; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptStartData; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationFinishData; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationStartData; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerFinishData; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerStartData; -import org.apache.hadoop.yarn.server.resourcemanager.RMServerUtils; -import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager; -import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp; -import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppState; -import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttempt; -import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptState; -import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainer; - -import com.google.common.annotations.VisibleForTesting; - -/** - *

- * {@link ResourceManager} uses this class to write the information of
- * {@link RMApp}, {@link RMAppAttempt} and {@link RMContainer}. These APIs are
- * non-blocking, and just schedule a writing history event. An self-contained
- * dispatcher vector will handle the event in separate threads, and extract the
- * required fields that are going to be persisted. Then, the extracted
- * information will be persisted via the implementation of
- * {@link ApplicationHistoryStore}.
- * </p>
- */ -@Private -@Unstable -public class RMApplicationHistoryWriter extends CompositeService { - - public static final Log LOG = LogFactory - .getLog(RMApplicationHistoryWriter.class); - - private Dispatcher dispatcher; - @VisibleForTesting - ApplicationHistoryWriter writer; - @VisibleForTesting - boolean historyServiceEnabled; - - public RMApplicationHistoryWriter() { - super(RMApplicationHistoryWriter.class.getName()); - } - - @Override - protected synchronized void serviceInit(Configuration conf) throws Exception { - historyServiceEnabled = - conf.getBoolean(YarnConfiguration.APPLICATION_HISTORY_ENABLED, - YarnConfiguration.DEFAULT_APPLICATION_HISTORY_ENABLED); - if (conf.get(YarnConfiguration.APPLICATION_HISTORY_STORE) == null || - conf.get(YarnConfiguration.APPLICATION_HISTORY_STORE).length() == 0 || - conf.get(YarnConfiguration.APPLICATION_HISTORY_STORE).equals( - NullApplicationHistoryStore.class.getName())) { - historyServiceEnabled = false; - } - - // Only create the services when the history service is enabled and not - // using the null store, preventing wasting the system resources. - if (historyServiceEnabled) { - writer = createApplicationHistoryStore(conf); - addIfService(writer); - - dispatcher = createDispatcher(conf); - dispatcher.register(WritingHistoryEventType.class, - new ForwardingEventHandler()); - addIfService(dispatcher); - } - super.serviceInit(conf); - } - - protected Dispatcher createDispatcher(Configuration conf) { - MultiThreadedDispatcher dispatcher = - new MultiThreadedDispatcher( - conf - .getInt( - YarnConfiguration.RM_HISTORY_WRITER_MULTI_THREADED_DISPATCHER_POOL_SIZE, - YarnConfiguration.DEFAULT_RM_HISTORY_WRITER_MULTI_THREADED_DISPATCHER_POOL_SIZE)); - dispatcher.setDrainEventsOnStop(); - return dispatcher; - } - - protected ApplicationHistoryStore createApplicationHistoryStore( - Configuration conf) { - try { - Class storeClass = - conf.getClass(YarnConfiguration.APPLICATION_HISTORY_STORE, - NullApplicationHistoryStore.class, - ApplicationHistoryStore.class); - return storeClass.newInstance(); - } catch (Exception e) { - String msg = - "Could not instantiate ApplicationHistoryWriter: " - + conf.get(YarnConfiguration.APPLICATION_HISTORY_STORE, - NullApplicationHistoryStore.class.getName()); - LOG.error(msg, e); - throw new YarnRuntimeException(msg, e); - } - } - - protected void handleWritingApplicationHistoryEvent( - WritingApplicationHistoryEvent event) { - switch (event.getType()) { - case APP_START: - WritingApplicationStartEvent wasEvent = - (WritingApplicationStartEvent) event; - try { - writer.applicationStarted(wasEvent.getApplicationStartData()); - LOG.info("Stored the start data of application " - + wasEvent.getApplicationId()); - } catch (IOException e) { - LOG.error("Error when storing the start data of application " - + wasEvent.getApplicationId()); - } - break; - case APP_FINISH: - WritingApplicationFinishEvent wafEvent = - (WritingApplicationFinishEvent) event; - try { - writer.applicationFinished(wafEvent.getApplicationFinishData()); - LOG.info("Stored the finish data of application " - + wafEvent.getApplicationId()); - } catch (IOException e) { - LOG.error("Error when storing the finish data of application " - + wafEvent.getApplicationId()); - } - break; - case APP_ATTEMPT_START: - WritingApplicationAttemptStartEvent waasEvent = - (WritingApplicationAttemptStartEvent) event; - try { - writer.applicationAttemptStarted(waasEvent - .getApplicationAttemptStartData()); - LOG.info("Stored the start data of application attempt " - + 
waasEvent.getApplicationAttemptId()); - } catch (IOException e) { - LOG.error("Error when storing the start data of application attempt " - + waasEvent.getApplicationAttemptId()); - } - break; - case APP_ATTEMPT_FINISH: - WritingApplicationAttemptFinishEvent waafEvent = - (WritingApplicationAttemptFinishEvent) event; - try { - writer.applicationAttemptFinished(waafEvent - .getApplicationAttemptFinishData()); - LOG.info("Stored the finish data of application attempt " - + waafEvent.getApplicationAttemptId()); - } catch (IOException e) { - LOG - .error("Error when storing the finish data of application attempt " - + waafEvent.getApplicationAttemptId()); - } - break; - case CONTAINER_START: - WritingContainerStartEvent wcsEvent = - (WritingContainerStartEvent) event; - try { - writer.containerStarted(wcsEvent.getContainerStartData()); - LOG.info("Stored the start data of container " - + wcsEvent.getContainerId()); - } catch (IOException e) { - LOG.error("Error when storing the start data of container " - + wcsEvent.getContainerId()); - } - break; - case CONTAINER_FINISH: - WritingContainerFinishEvent wcfEvent = - (WritingContainerFinishEvent) event; - try { - writer.containerFinished(wcfEvent.getContainerFinishData()); - LOG.info("Stored the finish data of container " - + wcfEvent.getContainerId()); - } catch (IOException e) { - LOG.error("Error when storing the finish data of container " - + wcfEvent.getContainerId()); - } - break; - default: - LOG.error("Unknown WritingApplicationHistoryEvent type: " - + event.getType()); - } - } - - @SuppressWarnings("unchecked") - public void applicationStarted(RMApp app) { - if (historyServiceEnabled) { - dispatcher.getEventHandler().handle( - new WritingApplicationStartEvent(app.getApplicationId(), - ApplicationStartData.newInstance(app.getApplicationId(), app.getName(), - app.getApplicationType(), app.getQueue(), app.getUser(), - app.getSubmitTime(), app.getStartTime()))); - } - } - - @SuppressWarnings("unchecked") - public void applicationFinished(RMApp app, RMAppState finalState) { - if (historyServiceEnabled) { - dispatcher.getEventHandler().handle( - new WritingApplicationFinishEvent(app.getApplicationId(), - ApplicationFinishData.newInstance(app.getApplicationId(), - app.getFinishTime(), app.getDiagnostics().toString(), - app.getFinalApplicationStatus(), - RMServerUtils.createApplicationState(finalState)))); - } - } - - @SuppressWarnings("unchecked") - public void applicationAttemptStarted(RMAppAttempt appAttempt) { - if (historyServiceEnabled) { - dispatcher.getEventHandler().handle( - new WritingApplicationAttemptStartEvent(appAttempt.getAppAttemptId(), - ApplicationAttemptStartData.newInstance(appAttempt.getAppAttemptId(), - appAttempt.getHost(), appAttempt.getRpcPort(), appAttempt - .getMasterContainer().getId()))); - } - } - - @SuppressWarnings("unchecked") - public void applicationAttemptFinished(RMAppAttempt appAttempt, - RMAppAttemptState finalState) { - if (historyServiceEnabled) { - dispatcher.getEventHandler().handle( - new WritingApplicationAttemptFinishEvent(appAttempt.getAppAttemptId(), - ApplicationAttemptFinishData.newInstance( - appAttempt.getAppAttemptId(), appAttempt.getDiagnostics() - .toString(), appAttempt.getTrackingUrl(), appAttempt - .getFinalApplicationStatus(), - RMServerUtils.createApplicationAttemptState(finalState)))); - } - } - - @SuppressWarnings("unchecked") - public void containerStarted(RMContainer container) { - if (historyServiceEnabled) { - dispatcher.getEventHandler().handle( - new 
WritingContainerStartEvent(container.getContainerId(), - ContainerStartData.newInstance(container.getContainerId(), - container.getAllocatedResource(), container.getAllocatedNode(), - container.getAllocatedPriority(), container.getCreationTime()))); - } - } - - @SuppressWarnings("unchecked") - public void containerFinished(RMContainer container) { - if (historyServiceEnabled) { - dispatcher.getEventHandler().handle( - new WritingContainerFinishEvent(container.getContainerId(), - ContainerFinishData.newInstance(container.getContainerId(), - container.getFinishTime(), container.getDiagnosticsInfo(), - container.getContainerExitStatus(), - container.getContainerState()))); - } - } - - /** - * EventHandler implementation which forward events to HistoryWriter Making - * use of it, HistoryWriter can avoid to have a public handle method - */ - private final class ForwardingEventHandler implements - EventHandler { - - @Override - public void handle(WritingApplicationHistoryEvent event) { - handleWritingApplicationHistoryEvent(event); - } - - } - - @SuppressWarnings({ "rawtypes", "unchecked" }) - protected static class MultiThreadedDispatcher extends CompositeService - implements Dispatcher { - - private List dispatchers = - new ArrayList(); - - public MultiThreadedDispatcher(int num) { - super(MultiThreadedDispatcher.class.getName()); - for (int i = 0; i < num; ++i) { - AsyncDispatcher dispatcher = createDispatcher(); - dispatchers.add(dispatcher); - addIfService(dispatcher); - } - } - - @Override - public EventHandler getEventHandler() { - return new CompositEventHandler(); - } - - @Override - public void register(Class eventType, EventHandler handler) { - for (AsyncDispatcher dispatcher : dispatchers) { - dispatcher.register(eventType, handler); - } - } - - public void setDrainEventsOnStop() { - for (AsyncDispatcher dispatcher : dispatchers) { - dispatcher.setDrainEventsOnStop(); - } - } - - private class CompositEventHandler implements EventHandler { - - @Override - public void handle(Event event) { - // Use hashCode (of ApplicationId) to dispatch the event to the child - // dispatcher, such that all the writing events of one application will - // be handled by one thread, the scheduled order of the these events - // will be preserved - int index = (event.hashCode() & Integer.MAX_VALUE) % dispatchers.size(); - dispatchers.get(index).getEventHandler().handle(event); - } - - } - - protected AsyncDispatcher createDispatcher() { - return new AsyncDispatcher(); - } - - } - -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ahs/WritingApplicationAttemptFinishEvent.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ahs/WritingApplicationAttemptFinishEvent.java deleted file mode 100644 index 3f6a620..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ahs/WritingApplicationAttemptFinishEvent.java +++ /dev/null @@ -1,51 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.yarn.server.resourcemanager.ahs; - -import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptFinishData; - -public class WritingApplicationAttemptFinishEvent extends - WritingApplicationHistoryEvent { - - private ApplicationAttemptId appAttemptId; - private ApplicationAttemptFinishData appAttemptFinish; - - public WritingApplicationAttemptFinishEvent( - ApplicationAttemptId appAttemptId, - ApplicationAttemptFinishData appAttemptFinish) { - super(WritingHistoryEventType.APP_ATTEMPT_FINISH); - this.appAttemptId = appAttemptId; - this.appAttemptFinish = appAttemptFinish; - } - - @Override - public int hashCode() { - return appAttemptId.getApplicationId().hashCode(); - } - - public ApplicationAttemptId getApplicationAttemptId() { - return appAttemptId; - } - - public ApplicationAttemptFinishData getApplicationAttemptFinishData() { - return appAttemptFinish; - } - -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ahs/WritingApplicationAttemptStartEvent.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ahs/WritingApplicationAttemptStartEvent.java deleted file mode 100644 index 7e092d3..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ahs/WritingApplicationAttemptStartEvent.java +++ /dev/null @@ -1,50 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.hadoop.yarn.server.resourcemanager.ahs; - -import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptStartData; - -public class WritingApplicationAttemptStartEvent extends - WritingApplicationHistoryEvent { - - private ApplicationAttemptId appAttemptId; - private ApplicationAttemptStartData appAttemptStart; - - public WritingApplicationAttemptStartEvent(ApplicationAttemptId appAttemptId, - ApplicationAttemptStartData appAttemptStart) { - super(WritingHistoryEventType.APP_ATTEMPT_START); - this.appAttemptId = appAttemptId; - this.appAttemptStart = appAttemptStart; - } - - @Override - public int hashCode() { - return appAttemptId.getApplicationId().hashCode(); - } - - public ApplicationAttemptId getApplicationAttemptId() { - return appAttemptId; - } - - public ApplicationAttemptStartData getApplicationAttemptStartData() { - return appAttemptStart; - } - -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ahs/WritingApplicationFinishEvent.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ahs/WritingApplicationFinishEvent.java deleted file mode 100644 index 7a20214..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ahs/WritingApplicationFinishEvent.java +++ /dev/null @@ -1,50 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.hadoop.yarn.server.resourcemanager.ahs; - -import org.apache.hadoop.yarn.api.records.ApplicationId; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationFinishData; - -public class WritingApplicationFinishEvent extends - WritingApplicationHistoryEvent { - - private ApplicationId appId; - private ApplicationFinishData appFinish; - - public WritingApplicationFinishEvent(ApplicationId appId, - ApplicationFinishData appFinish) { - super(WritingHistoryEventType.APP_FINISH); - this.appId = appId; - this.appFinish = appFinish; - } - - @Override - public int hashCode() { - return appId.hashCode(); - } - - public ApplicationId getApplicationId() { - return appId; - } - - public ApplicationFinishData getApplicationFinishData() { - return appFinish; - } - -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ahs/WritingApplicationHistoryEvent.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ahs/WritingApplicationHistoryEvent.java deleted file mode 100644 index bc17edc..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ahs/WritingApplicationHistoryEvent.java +++ /dev/null @@ -1,30 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.yarn.server.resourcemanager.ahs; - -import org.apache.hadoop.yarn.event.AbstractEvent; - -public class WritingApplicationHistoryEvent extends - AbstractEvent { - - public WritingApplicationHistoryEvent(WritingHistoryEventType type) { - super(type); - } - -} \ No newline at end of file diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ahs/WritingApplicationStartEvent.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ahs/WritingApplicationStartEvent.java deleted file mode 100644 index 1b5dc78..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ahs/WritingApplicationStartEvent.java +++ /dev/null @@ -1,50 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.yarn.server.resourcemanager.ahs; - -import org.apache.hadoop.yarn.api.records.ApplicationId; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationStartData; - -public class WritingApplicationStartEvent extends - WritingApplicationHistoryEvent { - - private ApplicationId appId; - private ApplicationStartData appStart; - - public WritingApplicationStartEvent(ApplicationId appId, - ApplicationStartData appStart) { - super(WritingHistoryEventType.APP_START); - this.appId = appId; - this.appStart = appStart; - } - - @Override - public int hashCode() { - return appId.hashCode(); - } - - public ApplicationId getApplicationId() { - return appId; - } - - public ApplicationStartData getApplicationStartData() { - return appStart; - } - -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ahs/WritingContainerFinishEvent.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ahs/WritingContainerFinishEvent.java deleted file mode 100644 index 6b27166..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ahs/WritingContainerFinishEvent.java +++ /dev/null @@ -1,49 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.hadoop.yarn.server.resourcemanager.ahs; - -import org.apache.hadoop.yarn.api.records.ContainerId; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerFinishData; - -public class WritingContainerFinishEvent extends WritingApplicationHistoryEvent { - - private ContainerId containerId; - private ContainerFinishData containerFinish; - - public WritingContainerFinishEvent(ContainerId containerId, - ContainerFinishData containerFinish) { - super(WritingHistoryEventType.CONTAINER_FINISH); - this.containerId = containerId; - this.containerFinish = containerFinish; - } - - @Override - public int hashCode() { - return containerId.getApplicationAttemptId().getApplicationId().hashCode(); - } - - public ContainerId getContainerId() { - return containerId; - } - - public ContainerFinishData getContainerFinishData() { - return containerFinish; - } - -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ahs/WritingContainerStartEvent.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ahs/WritingContainerStartEvent.java deleted file mode 100644 index f6df669..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ahs/WritingContainerStartEvent.java +++ /dev/null @@ -1,49 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.hadoop.yarn.server.resourcemanager.ahs; - -import org.apache.hadoop.yarn.api.records.ContainerId; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerStartData; - -public class WritingContainerStartEvent extends WritingApplicationHistoryEvent { - - private ContainerId containerId; - private ContainerStartData containerStart; - - public WritingContainerStartEvent(ContainerId containerId, - ContainerStartData containerStart) { - super(WritingHistoryEventType.CONTAINER_START); - this.containerId = containerId; - this.containerStart = containerStart; - } - - @Override - public int hashCode() { - return containerId.getApplicationAttemptId().getApplicationId().hashCode(); - } - - public ContainerId getContainerId() { - return containerId; - } - - public ContainerStartData getContainerStartData() { - return containerStart; - } - -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ahs/WritingHistoryEventType.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ahs/WritingHistoryEventType.java deleted file mode 100644 index 2f05428..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ahs/WritingHistoryEventType.java +++ /dev/null @@ -1,24 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.hadoop.yarn.server.resourcemanager.ahs; - -public enum WritingHistoryEventType { - APP_START, APP_FINISH, APP_ATTEMPT_START, APP_ATTEMPT_FINISH, - CONTAINER_START, CONTAINER_FINISH -} \ No newline at end of file diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/RMAppImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/RMAppImpl.java index c0681aa..a5fa947 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/RMAppImpl.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/RMAppImpl.java @@ -390,7 +390,6 @@ public RMAppImpl(ApplicationId applicationId, RMContext rmContext, this.stateMachine = stateMachineFactory.make(this); - rmContext.getRMApplicationHistoryWriter().applicationStarted(this); rmContext.getSystemMetricsPublisher().appCreated(this, startTime); } @@ -1132,8 +1131,6 @@ public void transition(RMAppImpl app, RMAppEvent event) { new RMAppManagerEvent(app.applicationId, RMAppManagerEventType.APP_COMPLETED)); - app.rmContext.getRMApplicationHistoryWriter() - .applicationFinished(app, finalState); app.rmContext.getSystemMetricsPublisher() .appFinished(app, finalState, app.finishTime); }; diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/RMAppAttemptImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/RMAppAttemptImpl.java index fbcb7d7..08381ed 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/RMAppAttemptImpl.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/RMAppAttemptImpl.java @@ -1228,8 +1228,6 @@ public void transition(RMAppAttemptImpl appAttempt, appAttemptId, finalAttemptState, keepContainersAcrossAppAttempts)); appAttempt.removeCredentials(appAttempt); - appAttempt.rmContext.getRMApplicationHistoryWriter() - .applicationAttemptFinished(appAttempt, finalAttemptState); appAttempt.rmContext.getSystemMetricsPublisher() .appAttemptFinished(appAttempt, finalAttemptState, appAttempt.rmContext.getRMApps().get( @@ -1346,8 +1344,6 @@ public void transition(RMAppAttemptImpl appAttempt, // as that would mean an extra state-store write. For now, we hope that in // work-preserving restart, AMs are forced to reregister. 
- appAttempt.rmContext.getRMApplicationHistoryWriter() - .applicationAttemptStarted(appAttempt); appAttempt.rmContext.getSystemMetricsPublisher() .appAttemptRegistered(appAttempt, System.currentTimeMillis()); } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmcontainer/RMContainerImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmcontainer/RMContainerImpl.java index 479734a..272d7d9 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmcontainer/RMContainerImpl.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmcontainer/RMContainerImpl.java @@ -41,7 +41,6 @@ import org.apache.hadoop.yarn.event.EventHandler; import org.apache.hadoop.yarn.server.api.protocolrecords.NMContainerStatus; import org.apache.hadoop.yarn.server.resourcemanager.RMContext; -import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppRunningOnNodeEvent; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttempt; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.event.RMAppAttemptContainerAllocatedEvent; @@ -191,7 +190,6 @@ public RMContainerImpl(Container container, this.readLock = lock.readLock(); this.writeLock = lock.writeLock(); - rmContext.getRMApplicationHistoryWriter().containerStarted(this); rmContext.getSystemMetricsPublisher().containerCreated( this, this.creationTime); } @@ -498,8 +496,6 @@ public void transition(RMContainerImpl container, RMContainerEvent event) { container.appAttemptId, finishedEvent.getRemoteContainerStatus(), container.getAllocatedNode())); - container.rmContext.getRMApplicationHistoryWriter().containerFinished( - container); container.rmContext.getSystemMetricsPublisher().containerFinished( container, container.finishTime); } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestAppManager.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestAppManager.java index 333d0cf..de29570 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestAppManager.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestAppManager.java @@ -53,7 +53,6 @@ import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; -import org.apache.hadoop.yarn.server.resourcemanager.ahs.RMApplicationHistoryWriter; import org.apache.hadoop.yarn.server.resourcemanager.recovery.RMStateStore; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.MockRMApp; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp; @@ -112,10 +111,9 @@ public RMContext mockRMContext(int n, long time) { rmDispatcher); AMLivelinessMonitor amFinishingMonitor = new 
AMLivelinessMonitor( rmDispatcher); - RMApplicationHistoryWriter writer = mock(RMApplicationHistoryWriter.class); RMContext context = new RMContextImpl(rmDispatcher, containerAllocationExpirer, amLivelinessMonitor, amFinishingMonitor, - null, null, null, null, null, writer) { + null, null, null, null, null) { @Override public ConcurrentMap getRMApps() { return map; diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestClientRMService.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestClientRMService.java index 954e21d..6da73c7 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestClientRMService.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestClientRMService.java @@ -87,7 +87,6 @@ import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ApplicationReport; import org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport; -import org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport; import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext; import org.apache.hadoop.yarn.api.records.Container; import org.apache.hadoop.yarn.api.records.ContainerId; @@ -115,7 +114,6 @@ import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.ipc.YarnRPC; import org.apache.hadoop.yarn.security.client.RMDelegationTokenIdentifier; -import org.apache.hadoop.yarn.server.resourcemanager.ahs.RMApplicationHistoryWriter; import org.apache.hadoop.yarn.server.resourcemanager.metrics.SystemMetricsPublisher; import org.apache.hadoop.yarn.server.resourcemanager.recovery.NullRMStateStore; import org.apache.hadoop.yarn.server.resourcemanager.recovery.RMStateStore; @@ -1098,8 +1096,6 @@ private void mockRMContext(YarnScheduler yarnScheduler, RMContext rmContext) .thenReturn(queInfo); when(yarnScheduler.getQueueInfo(eq("nonexistentqueue"), anyBoolean(), anyBoolean())) .thenThrow(new IOException("queue does not exist")); - RMApplicationHistoryWriter writer = mock(RMApplicationHistoryWriter.class); - when(rmContext.getRMApplicationHistoryWriter()).thenReturn(writer); SystemMetricsPublisher publisher = mock(SystemMetricsPublisher.class); when(rmContext.getSystemMetricsPublisher()).thenReturn(publisher); ConcurrentHashMap apps = getRMApps(rmContext, diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMNodeTransitions.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMNodeTransitions.java index d877e25..136e95d 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMNodeTransitions.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMNodeTransitions.java @@ -105,7 +105,7 @@ public void setUp() throws Exception { rmContext = new RMContextImpl(rmDispatcher, null, null, null, 
- mock(DelegationTokenRenewer.class), null, null, null, null, null); + mock(DelegationTokenRenewer.class), null, null, null, null); NodesListManager nodesListManager = mock(NodesListManager.class); HostsFileReader reader = mock(HostsFileReader.class); when(nodesListManager.getHostsReader()).thenReturn(reader); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/ahs/TestRMApplicationHistoryWriter.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/ahs/TestRMApplicationHistoryWriter.java deleted file mode 100644 index 78077d4..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/ahs/TestRMApplicationHistoryWriter.java +++ /dev/null @@ -1,554 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.yarn.server.resourcemanager.ahs; - -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Random; - -import org.junit.Assert; - -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; -import org.apache.hadoop.yarn.api.records.ApplicationId; -import org.apache.hadoop.yarn.api.records.Container; -import org.apache.hadoop.yarn.api.records.ContainerId; -import org.apache.hadoop.yarn.api.records.ContainerState; -import org.apache.hadoop.yarn.api.records.FinalApplicationStatus; -import org.apache.hadoop.yarn.api.records.NodeId; -import org.apache.hadoop.yarn.api.records.Priority; -import org.apache.hadoop.yarn.api.records.Resource; -import org.apache.hadoop.yarn.api.records.ResourceRequest; -import org.apache.hadoop.yarn.api.records.YarnApplicationAttemptState; -import org.apache.hadoop.yarn.api.records.YarnApplicationState; -import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.event.AsyncDispatcher; -import org.apache.hadoop.yarn.event.Dispatcher; -import org.apache.hadoop.yarn.event.Event; -import org.apache.hadoop.yarn.server.api.protocolrecords.NodeHeartbeatResponse; -import org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryStore; -import org.apache.hadoop.yarn.server.applicationhistoryservice.MemoryApplicationHistoryStore; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptHistoryData; -import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationHistoryData; -import 
org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerHistoryData; -import org.apache.hadoop.yarn.server.resourcemanager.MockAM; -import org.apache.hadoop.yarn.server.resourcemanager.MockNM; -import org.apache.hadoop.yarn.server.resourcemanager.MockRM; -import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp; -import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppState; -import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttempt; -import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptState; -import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainer; -import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacityScheduler; -import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair.FairScheduler; -import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair.FairSchedulerConfiguration; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -public class TestRMApplicationHistoryWriter { - - private static int MAX_RETRIES = 10; - - private RMApplicationHistoryWriter writer; - private ApplicationHistoryStore store; - private List dispatchers = - new ArrayList(); - - @Before - public void setup() { - store = new MemoryApplicationHistoryStore(); - Configuration conf = new Configuration(); - conf.setBoolean(YarnConfiguration.APPLICATION_HISTORY_ENABLED, true); - conf.setClass(YarnConfiguration.APPLICATION_HISTORY_STORE, - MemoryApplicationHistoryStore.class, ApplicationHistoryStore.class); - writer = new RMApplicationHistoryWriter() { - - @Override - protected ApplicationHistoryStore createApplicationHistoryStore( - Configuration conf) { - return store; - } - - @Override - protected Dispatcher createDispatcher(Configuration conf) { - MultiThreadedDispatcher dispatcher = - new MultiThreadedDispatcher( - conf - .getInt( - YarnConfiguration.RM_HISTORY_WRITER_MULTI_THREADED_DISPATCHER_POOL_SIZE, - YarnConfiguration.DEFAULT_RM_HISTORY_WRITER_MULTI_THREADED_DISPATCHER_POOL_SIZE)); - dispatcher.setDrainEventsOnStop(); - return dispatcher; - } - - class MultiThreadedDispatcher extends - RMApplicationHistoryWriter.MultiThreadedDispatcher { - - public MultiThreadedDispatcher(int num) { - super(num); - } - - @Override - protected AsyncDispatcher createDispatcher() { - CounterDispatcher dispatcher = new CounterDispatcher(); - dispatchers.add(dispatcher); - return dispatcher; - } - - } - }; - writer.init(conf); - writer.start(); - } - - @After - public void tearDown() { - writer.stop(); - } - - private static RMApp createRMApp(ApplicationId appId) { - RMApp app = mock(RMApp.class); - when(app.getApplicationId()).thenReturn(appId); - when(app.getName()).thenReturn("test app"); - when(app.getApplicationType()).thenReturn("test app type"); - when(app.getUser()).thenReturn("test user"); - when(app.getQueue()).thenReturn("test queue"); - when(app.getSubmitTime()).thenReturn(0L); - when(app.getStartTime()).thenReturn(1L); - when(app.getFinishTime()).thenReturn(2L); - when(app.getDiagnostics()).thenReturn( - new StringBuilder("test diagnostics info")); - when(app.getFinalApplicationStatus()).thenReturn( - FinalApplicationStatus.UNDEFINED); - return app; - } - - private static RMAppAttempt createRMAppAttempt( - ApplicationAttemptId appAttemptId) { - RMAppAttempt appAttempt = mock(RMAppAttempt.class); - when(appAttempt.getAppAttemptId()).thenReturn(appAttemptId); - when(appAttempt.getHost()).thenReturn("test host"); - 
when(appAttempt.getRpcPort()).thenReturn(-100); - Container container = mock(Container.class); - when(container.getId()) - .thenReturn(ContainerId.newInstance(appAttemptId, 1)); - when(appAttempt.getMasterContainer()).thenReturn(container); - when(appAttempt.getDiagnostics()).thenReturn("test diagnostics info"); - when(appAttempt.getTrackingUrl()).thenReturn("test url"); - when(appAttempt.getFinalApplicationStatus()).thenReturn( - FinalApplicationStatus.UNDEFINED); - return appAttempt; - } - - private static RMContainer createRMContainer(ContainerId containerId) { - RMContainer container = mock(RMContainer.class); - when(container.getContainerId()).thenReturn(containerId); - when(container.getAllocatedNode()).thenReturn( - NodeId.newInstance("test host", -100)); - when(container.getAllocatedResource()).thenReturn( - Resource.newInstance(-1, -1)); - when(container.getAllocatedPriority()).thenReturn(Priority.UNDEFINED); - when(container.getCreationTime()).thenReturn(0L); - when(container.getFinishTime()).thenReturn(1L); - when(container.getDiagnosticsInfo()).thenReturn("test diagnostics info"); - when(container.getLogURL()).thenReturn("test log url"); - when(container.getContainerExitStatus()).thenReturn(-1); - when(container.getContainerState()).thenReturn(ContainerState.COMPLETE); - return container; - } - - @Test - public void testDefaultStoreSetup() throws Exception { - Configuration conf = new YarnConfiguration(); - conf.setBoolean(YarnConfiguration.APPLICATION_HISTORY_ENABLED, true); - RMApplicationHistoryWriter writer = new RMApplicationHistoryWriter(); - writer.init(conf); - writer.start(); - try { - Assert.assertFalse(writer.historyServiceEnabled); - Assert.assertNull(writer.writer); - } finally { - writer.stop(); - writer.close(); - } - } - - @Test - public void testWriteApplication() throws Exception { - RMApp app = createRMApp(ApplicationId.newInstance(0, 1)); - - writer.applicationStarted(app); - ApplicationHistoryData appHD = null; - for (int i = 0; i < MAX_RETRIES; ++i) { - appHD = store.getApplication(ApplicationId.newInstance(0, 1)); - if (appHD != null) { - break; - } else { - Thread.sleep(100); - } - } - Assert.assertNotNull(appHD); - Assert.assertEquals("test app", appHD.getApplicationName()); - Assert.assertEquals("test app type", appHD.getApplicationType()); - Assert.assertEquals("test user", appHD.getUser()); - Assert.assertEquals("test queue", appHD.getQueue()); - Assert.assertEquals(0L, appHD.getSubmitTime()); - Assert.assertEquals(1L, appHD.getStartTime()); - - writer.applicationFinished(app, RMAppState.FINISHED); - for (int i = 0; i < MAX_RETRIES; ++i) { - appHD = store.getApplication(ApplicationId.newInstance(0, 1)); - if (appHD.getYarnApplicationState() != null) { - break; - } else { - Thread.sleep(100); - } - } - Assert.assertEquals(2L, appHD.getFinishTime()); - Assert.assertEquals("test diagnostics info", appHD.getDiagnosticsInfo()); - Assert.assertEquals(FinalApplicationStatus.UNDEFINED, - appHD.getFinalApplicationStatus()); - Assert.assertEquals(YarnApplicationState.FINISHED, - appHD.getYarnApplicationState()); - } - - @Test - public void testWriteApplicationAttempt() throws Exception { - RMAppAttempt appAttempt = - createRMAppAttempt(ApplicationAttemptId.newInstance( - ApplicationId.newInstance(0, 1), 1)); - writer.applicationAttemptStarted(appAttempt); - ApplicationAttemptHistoryData appAttemptHD = null; - for (int i = 0; i < MAX_RETRIES; ++i) { - appAttemptHD = - store.getApplicationAttempt(ApplicationAttemptId.newInstance( - ApplicationId.newInstance(0, 
1), 1)); - if (appAttemptHD != null) { - break; - } else { - Thread.sleep(100); - } - } - Assert.assertNotNull(appAttemptHD); - Assert.assertEquals("test host", appAttemptHD.getHost()); - Assert.assertEquals(-100, appAttemptHD.getRPCPort()); - Assert.assertEquals(ContainerId.newInstance( - ApplicationAttemptId.newInstance(ApplicationId.newInstance(0, 1), 1), 1), - appAttemptHD.getMasterContainerId()); - - writer.applicationAttemptFinished(appAttempt, RMAppAttemptState.FINISHED); - for (int i = 0; i < MAX_RETRIES; ++i) { - appAttemptHD = - store.getApplicationAttempt(ApplicationAttemptId.newInstance( - ApplicationId.newInstance(0, 1), 1)); - if (appAttemptHD.getYarnApplicationAttemptState() != null) { - break; - } else { - Thread.sleep(100); - } - } - Assert.assertEquals("test diagnostics info", - appAttemptHD.getDiagnosticsInfo()); - Assert.assertEquals("test url", appAttemptHD.getTrackingURL()); - Assert.assertEquals(FinalApplicationStatus.UNDEFINED, - appAttemptHD.getFinalApplicationStatus()); - Assert.assertEquals(YarnApplicationAttemptState.FINISHED, - appAttemptHD.getYarnApplicationAttemptState()); - } - - @Test - public void testWriteContainer() throws Exception { - RMContainer container = - createRMContainer(ContainerId.newInstance( - ApplicationAttemptId.newInstance(ApplicationId.newInstance(0, 1), 1), - 1)); - writer.containerStarted(container); - ContainerHistoryData containerHD = null; - for (int i = 0; i < MAX_RETRIES; ++i) { - containerHD = - store.getContainer(ContainerId.newInstance(ApplicationAttemptId - .newInstance(ApplicationId.newInstance(0, 1), 1), 1)); - if (containerHD != null) { - break; - } else { - Thread.sleep(100); - } - } - Assert.assertNotNull(containerHD); - Assert.assertEquals(NodeId.newInstance("test host", -100), - containerHD.getAssignedNode()); - Assert.assertEquals(Resource.newInstance(-1, -1), - containerHD.getAllocatedResource()); - Assert.assertEquals(Priority.UNDEFINED, containerHD.getPriority()); - Assert.assertEquals(0L, container.getCreationTime()); - - writer.containerFinished(container); - for (int i = 0; i < MAX_RETRIES; ++i) { - containerHD = - store.getContainer(ContainerId.newInstance(ApplicationAttemptId - .newInstance(ApplicationId.newInstance(0, 1), 1), 1)); - if (containerHD.getContainerState() != null) { - break; - } else { - Thread.sleep(100); - } - } - Assert.assertEquals("test diagnostics info", - containerHD.getDiagnosticsInfo()); - Assert.assertEquals(-1, containerHD.getContainerExitStatus()); - Assert.assertEquals(ContainerState.COMPLETE, - containerHD.getContainerState()); - } - - @Test - public void testParallelWrite() throws Exception { - List appIds = new ArrayList(); - for (int i = 0; i < 10; ++i) { - Random rand = new Random(i); - ApplicationId appId = ApplicationId.newInstance(0, rand.nextInt()); - appIds.add(appId); - RMApp app = createRMApp(appId); - writer.applicationStarted(app); - for (int j = 1; j <= 10; ++j) { - ApplicationAttemptId appAttemptId = - ApplicationAttemptId.newInstance(appId, j); - RMAppAttempt appAttempt = createRMAppAttempt(appAttemptId); - writer.applicationAttemptStarted(appAttempt); - for (int k = 1; k <= 10; ++k) { - ContainerId containerId = ContainerId.newInstance(appAttemptId, k); - RMContainer container = createRMContainer(containerId); - writer.containerStarted(container); - writer.containerFinished(container); - } - writer.applicationAttemptFinished( - appAttempt, RMAppAttemptState.FINISHED); - } - writer.applicationFinished(app, RMAppState.FINISHED); - } - for (int i = 0; i < MAX_RETRIES; 
++i) { - if (allEventsHandled(20 * 10 * 10 + 20 * 10 + 20)) { - break; - } else { - Thread.sleep(500); - } - } - Assert.assertTrue(allEventsHandled(20 * 10 * 10 + 20 * 10 + 20)); - // Validate all events of one application are handled by one dispatcher - for (ApplicationId appId : appIds) { - Assert.assertTrue(handledByOne(appId)); - } - } - - private boolean allEventsHandled(int expected) { - int actual = 0; - for (CounterDispatcher dispatcher : dispatchers) { - for (Integer count : dispatcher.counts.values()) { - actual += count; - } - } - return actual == expected; - } - - @Test - public void testRMWritingMassiveHistoryForFairSche() throws Exception { - //test WritingMassiveHistory for Fair Scheduler. - testRMWritingMassiveHistory(true); - } - - @Test - public void testRMWritingMassiveHistoryForCapacitySche() throws Exception { - //test WritingMassiveHistory for Capacity Scheduler. - testRMWritingMassiveHistory(false); - } - - private void testRMWritingMassiveHistory(boolean isFS) throws Exception { - // 1. Show RM can run with writing history data - // 2. Test additional workload of processing history events - YarnConfiguration conf = new YarnConfiguration(); - if (isFS) { - conf.setBoolean(FairSchedulerConfiguration.ASSIGN_MULTIPLE, true); - conf.set("yarn.resourcemanager.scheduler.class", - FairScheduler.class.getName()); - } else { - conf.set("yarn.resourcemanager.scheduler.class", - CapacityScheduler.class.getName()); - } - // don't process history events - MockRM rm = new MockRM(conf) { - @Override - protected RMApplicationHistoryWriter createRMApplicationHistoryWriter() { - return new RMApplicationHistoryWriter() { - @Override - public void applicationStarted(RMApp app) { - } - - @Override - public void applicationFinished(RMApp app, RMAppState finalState) { - } - - @Override - public void applicationAttemptStarted(RMAppAttempt appAttempt) { - } - - @Override - public void applicationAttemptFinished( - RMAppAttempt appAttempt, RMAppAttemptState finalState) { - } - - @Override - public void containerStarted(RMContainer container) { - } - - @Override - public void containerFinished(RMContainer container) { - } - }; - } - }; - long startTime1 = System.currentTimeMillis(); - testRMWritingMassiveHistory(rm); - long finishTime1 = System.currentTimeMillis(); - long elapsedTime1 = finishTime1 - startTime1; - rm = new MockRM(conf); - long startTime2 = System.currentTimeMillis(); - testRMWritingMassiveHistory(rm); - long finishTime2 = System.currentTimeMillis(); - long elapsedTime2 = finishTime2 - startTime2; - // No more than 10% additional workload - // Should be much less, but computation time is fluctuated - Assert.assertTrue(elapsedTime2 - elapsedTime1 < elapsedTime1 / 10); - } - - private void testRMWritingMassiveHistory(MockRM rm) throws Exception { - rm.start(); - MockNM nm = rm.registerNode("127.0.0.1:1234", 1024 * 10100); - - RMApp app = rm.submitApp(1024); - nm.nodeHeartbeat(true); - RMAppAttempt attempt = app.getCurrentAppAttempt(); - MockAM am = rm.sendAMLaunched(attempt.getAppAttemptId()); - am.registerAppAttempt(); - - int request = 10000; - am.allocate("127.0.0.1", 1024, request, new ArrayList()); - nm.nodeHeartbeat(true); - List allocated = - am.allocate(new ArrayList(), - new ArrayList()).getAllocatedContainers(); - int waitCount = 0; - int allocatedSize = allocated.size(); - while (allocatedSize < request && waitCount++ < 200) { - Thread.sleep(300); - allocated = - am.allocate(new ArrayList(), - new ArrayList()).getAllocatedContainers(); - allocatedSize += 
allocated.size(); - nm.nodeHeartbeat(true); - } - Assert.assertEquals(request, allocatedSize); - - am.unregisterAppAttempt(); - am.waitForState(RMAppAttemptState.FINISHING); - nm.nodeHeartbeat(am.getApplicationAttemptId(), 1, ContainerState.COMPLETE); - am.waitForState(RMAppAttemptState.FINISHED); - - NodeHeartbeatResponse resp = nm.nodeHeartbeat(true); - List cleaned = resp.getContainersToCleanup(); - int cleanedSize = cleaned.size(); - waitCount = 0; - while (cleanedSize < allocatedSize && waitCount++ < 200) { - Thread.sleep(300); - resp = nm.nodeHeartbeat(true); - cleaned = resp.getContainersToCleanup(); - cleanedSize += cleaned.size(); - } - Assert.assertEquals(allocatedSize, cleanedSize); - rm.waitForState(app.getApplicationId(), RMAppState.FINISHED); - - rm.stop(); - } - - private boolean handledByOne(ApplicationId appId) { - int count = 0; - for (CounterDispatcher dispatcher : dispatchers) { - if (dispatcher.counts.containsKey(appId)) { - ++count; - } - } - return count == 1; - } - - private static class CounterDispatcher extends AsyncDispatcher { - - private Map counts = - new HashMap(); - - @SuppressWarnings("rawtypes") - @Override - protected void dispatch(Event event) { - if (event instanceof WritingApplicationHistoryEvent) { - WritingApplicationHistoryEvent ashEvent = - (WritingApplicationHistoryEvent) event; - switch (ashEvent.getType()) { - case APP_START: - incrementCounts(((WritingApplicationStartEvent) event) - .getApplicationId()); - break; - case APP_FINISH: - incrementCounts(((WritingApplicationFinishEvent) event) - .getApplicationId()); - break; - case APP_ATTEMPT_START: - incrementCounts(((WritingApplicationAttemptStartEvent) event) - .getApplicationAttemptId().getApplicationId()); - break; - case APP_ATTEMPT_FINISH: - incrementCounts(((WritingApplicationAttemptFinishEvent) event) - .getApplicationAttemptId().getApplicationId()); - break; - case CONTAINER_START: - incrementCounts(((WritingContainerStartEvent) event) - .getContainerId().getApplicationAttemptId().getApplicationId()); - break; - case CONTAINER_FINISH: - incrementCounts(((WritingContainerFinishEvent) event) - .getContainerId().getApplicationAttemptId().getApplicationId()); - break; - } - } - super.dispatch(event); - } - - private void incrementCounts(ApplicationId appId) { - Integer val = counts.get(appId); - if (val == null) { - counts.put(appId, 1); - } else { - counts.put(appId, val + 1); - } - } - } - -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/reservation/ReservationSystemTestUtil.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/reservation/ReservationSystemTestUtil.java index 71b5b8b..c653425 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/reservation/ReservationSystemTestUtil.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/reservation/ReservationSystemTestUtil.java @@ -67,7 +67,7 @@ public CapacityScheduler mockCapacityScheduler(int numContainers) Mockito.spy(new RMContextImpl(null, null, null, null, null, null, new RMContainerTokenSecretManager(conf), new NMTokenSecretManagerInRM(conf), - new ClientToAMTokenSecretManagerInRM(), null)); + new 
ClientToAMTokenSecretManagerInRM())); cs.setRMContext(mockRmContext); try { cs.serviceInit(conf); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/resourcetracker/TestNMExpiry.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/resourcetracker/TestNMExpiry.java index c837450..176cb7b 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/resourcetracker/TestNMExpiry.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/resourcetracker/TestNMExpiry.java @@ -72,7 +72,7 @@ public void setUp() { // Dispatcher that processes events inline Dispatcher dispatcher = new InlineDispatcher(); RMContext context = new RMContextImpl(dispatcher, null, - null, null, null, null, null, null, null, null); + null, null, null, null, null, null, null); dispatcher.register(SchedulerEventType.class, new InlineDispatcher.EmptyEventHandler()); dispatcher.register(RMNodeEventType.class, diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/resourcetracker/TestNMReconnect.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/resourcetracker/TestNMReconnect.java index d16d551..8873e27 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/resourcetracker/TestNMReconnect.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/resourcetracker/TestNMReconnect.java @@ -74,7 +74,7 @@ public void setUp() { new TestRMNodeEventDispatcher()); RMContext context = new RMContextImpl(dispatcher, null, - null, null, null, null, null, null, null, null); + null, null, null, null, null, null, null); dispatcher.register(SchedulerEventType.class, new InlineDispatcher.EmptyEventHandler()); dispatcher.register(RMNodeEventType.class, diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/resourcetracker/TestRMNMRPCResponseId.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/resourcetracker/TestRMNMRPCResponseId.java index 4f94695..ddb7a90 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/resourcetracker/TestRMNMRPCResponseId.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/resourcetracker/TestRMNMRPCResponseId.java @@ -71,7 +71,7 @@ public void handle(Event event) { RMContext context = new RMContextImpl(dispatcher, null, null, null, null, null, new RMContainerTokenSecretManager(conf), - new NMTokenSecretManagerInRM(conf), null, null); + new NMTokenSecretManagerInRM(conf), null); dispatcher.register(RMNodeEventType.class, new 
ResourceManager.NodeEventDispatcher(context)); NodesListManager nodesListManager = new NodesListManager(context); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/TestRMAppTransitions.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/TestRMAppTransitions.java index 457f21e..eed4fbc 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/TestRMAppTransitions.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/TestRMAppTransitions.java @@ -54,7 +54,6 @@ import org.apache.hadoop.yarn.server.resourcemanager.RMContext; import org.apache.hadoop.yarn.server.resourcemanager.RMContextImpl; import org.apache.hadoop.yarn.server.resourcemanager.RMServerUtils; -import org.apache.hadoop.yarn.server.resourcemanager.ahs.RMApplicationHistoryWriter; import org.apache.hadoop.yarn.server.resourcemanager.metrics.SystemMetricsPublisher; import org.apache.hadoop.yarn.server.resourcemanager.recovery.RMStateStore; import org.apache.hadoop.yarn.server.resourcemanager.recovery.RMStateStore.ApplicationState; @@ -94,7 +93,6 @@ private static int appId = 1; private DrainDispatcher rmDispatcher; private RMStateStore store; - private RMApplicationHistoryWriter writer; private SystemMetricsPublisher publisher; private YarnScheduler scheduler; private TestSchedulerEventDispatcher schedulerDispatcher; @@ -195,15 +193,13 @@ public void setUp() throws Exception { AMLivelinessMonitor amLivelinessMonitor = mock(AMLivelinessMonitor.class); AMLivelinessMonitor amFinishingMonitor = mock(AMLivelinessMonitor.class); store = mock(RMStateStore.class); - writer = mock(RMApplicationHistoryWriter.class); RMContext realRMContext = new RMContextImpl(rmDispatcher, containerAllocationExpirer, amLivelinessMonitor, amFinishingMonitor, null, new AMRMTokenSecretManager(conf, this.rmContext), new RMContainerTokenSecretManager(conf), new NMTokenSecretManagerInRM(conf), - new ClientToAMTokenSecretManagerInRM(), - writer); + new ClientToAMTokenSecretManagerInRM()); ((RMContextImpl)realRMContext).setStateStore(store); publisher = mock(SystemMetricsPublisher.class); ((RMContextImpl)realRMContext).setSystemMetricsPublisher(publisher); @@ -357,7 +353,6 @@ private void sendAttemptUpdateSavedEvent(RMApp application) { protected RMApp testCreateAppNewSaving( ApplicationSubmissionContext submissionContext) throws IOException { RMApp application = createNewTestApp(submissionContext); - verify(writer).applicationStarted(any(RMApp.class)); verify(publisher).appCreated(any(RMApp.class), anyLong()); // NEW => NEW_SAVING event RMAppEventType.START RMAppEvent event = @@ -481,7 +476,6 @@ public void testUnmanagedApp() throws IOException { application.getDiagnostics().indexOf(diagMsg) != -1); // reset the counter of Mockito.verify - reset(writer); reset(publisher); // test app fails after 1 app attempt failure @@ -968,9 +962,6 @@ public void testGetAppReport() { private void verifyApplicationFinished(RMAppState state) { ArgumentCaptor finalState = ArgumentCaptor.forClass(RMAppState.class); - verify(writer).applicationFinished(any(RMApp.class), finalState.capture()); - Assert.assertEquals(state, finalState.getValue()); - finalState = 
ArgumentCaptor.forClass(RMAppState.class); verify(publisher).appFinished(any(RMApp.class), finalState.capture(), anyLong()); Assert.assertEquals(state, finalState.getValue()); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/TestRMAppAttemptTransitions.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/TestRMAppAttemptTransitions.java index 7f27f4e..d5df78a 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/TestRMAppAttemptTransitions.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/TestRMAppAttemptTransitions.java @@ -71,7 +71,6 @@ import org.apache.hadoop.yarn.server.resourcemanager.ApplicationMasterService; import org.apache.hadoop.yarn.server.resourcemanager.RMContext; import org.apache.hadoop.yarn.server.resourcemanager.RMContextImpl; -import org.apache.hadoop.yarn.server.resourcemanager.ahs.RMApplicationHistoryWriter; import org.apache.hadoop.yarn.server.resourcemanager.amlauncher.AMLauncherEvent; import org.apache.hadoop.yarn.server.resourcemanager.amlauncher.AMLauncherEventType; import org.apache.hadoop.yarn.server.resourcemanager.amlauncher.ApplicationMasterLauncher; @@ -142,7 +141,6 @@ private ApplicationMasterLauncher applicationMasterLauncher; private AMLivelinessMonitor amLivelinessMonitor; private AMLivelinessMonitor amFinishingMonitor; - private RMApplicationHistoryWriter writer; private SystemMetricsPublisher publisher; private RMStateStore store; @@ -243,7 +241,6 @@ public void setUp() throws Exception { mock(ContainerAllocationExpirer.class); amLivelinessMonitor = mock(AMLivelinessMonitor.class); amFinishingMonitor = mock(AMLivelinessMonitor.class); - writer = mock(RMApplicationHistoryWriter.class); MasterKeyData masterKeyData = amRMTokenManager.createNewMasterKey(); when(amRMTokenManager.getMasterKey()).thenReturn(masterKeyData); rmContext = @@ -252,8 +249,7 @@ public void setUp() throws Exception { null, amRMTokenManager, new RMContainerTokenSecretManager(conf), nmTokenManager, - clientToAMTokenManager, - writer); + clientToAMTokenManager); store = mock(RMStateStore.class); ((RMContextImpl) rmContext).setStateStore(store); @@ -1473,11 +1469,6 @@ private void verifyAMHostAndPortInvalidated() { private void verifyApplicationAttemptFinished(RMAppAttemptState state) { ArgumentCaptor finalState = ArgumentCaptor.forClass(RMAppAttemptState.class); - verify(writer).applicationAttemptFinished( - any(RMAppAttempt.class), finalState.capture()); - Assert.assertEquals(state, finalState.getValue()); - finalState = - ArgumentCaptor.forClass(RMAppAttemptState.class); verify(publisher).appAttemptFinished(any(RMAppAttempt.class), finalState.capture(), any(RMApp.class), anyLong()); Assert.assertEquals(state, finalState.getValue()); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/rmcontainer/TestRMContainerImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/rmcontainer/TestRMContainerImpl.java index 
553587e..08b69f0 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/rmcontainer/TestRMContainerImpl.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/rmcontainer/TestRMContainerImpl.java @@ -51,7 +51,6 @@ import org.apache.hadoop.yarn.server.resourcemanager.MockNM; import org.apache.hadoop.yarn.server.resourcemanager.MockRM; import org.apache.hadoop.yarn.server.resourcemanager.RMContext; -import org.apache.hadoop.yarn.server.resourcemanager.ahs.RMApplicationHistoryWriter; import org.apache.hadoop.yarn.server.resourcemanager.metrics.SystemMetricsPublisher; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptEvent; @@ -99,12 +98,10 @@ public void testReleaseWhileRunning() { when(rmApp.getRMAppAttempt((ApplicationAttemptId)Matchers.any())).thenReturn(null); Mockito.doReturn(rmApp).when(rmApps).get((ApplicationId)Matchers.any()); - RMApplicationHistoryWriter writer = mock(RMApplicationHistoryWriter.class); SystemMetricsPublisher publisher = mock(SystemMetricsPublisher.class); RMContext rmContext = mock(RMContext.class); when(rmContext.getDispatcher()).thenReturn(drainDispatcher); when(rmContext.getContainerAllocationExpirer()).thenReturn(expirer); - when(rmContext.getRMApplicationHistoryWriter()).thenReturn(writer); when(rmContext.getRMApps()).thenReturn(rmApps); when(rmContext.getSystemMetricsPublisher()).thenReturn(publisher); RMContainer rmContainer = new RMContainerImpl(container, appAttemptId, @@ -114,7 +111,6 @@ public void testReleaseWhileRunning() { assertEquals(resource, rmContainer.getAllocatedResource()); assertEquals(nodeId, rmContainer.getAllocatedNode()); assertEquals(priority, rmContainer.getAllocatedPriority()); - verify(writer).containerStarted(any(RMContainer.class)); verify(publisher).containerCreated(any(RMContainer.class), anyLong()); rmContainer.handle(new RMContainerEvent(containerId, @@ -147,7 +143,6 @@ public void testReleaseWhileRunning() { assertEquals(ContainerExitStatus.ABORTED, rmContainer.getContainerExitStatus()); assertEquals(ContainerState.COMPLETE, rmContainer.getContainerState()); - verify(writer).containerFinished(any(RMContainer.class)); verify(publisher).containerFinished(any(RMContainer.class), anyLong()); ArgumentCaptor captor = ArgumentCaptor @@ -189,12 +184,10 @@ public void testExpireWhileRunning() { Container container = BuilderUtils.newContainer(containerId, nodeId, "host:3465", resource, priority, null); - RMApplicationHistoryWriter writer = mock(RMApplicationHistoryWriter.class); SystemMetricsPublisher publisher = mock(SystemMetricsPublisher.class); RMContext rmContext = mock(RMContext.class); when(rmContext.getDispatcher()).thenReturn(drainDispatcher); when(rmContext.getContainerAllocationExpirer()).thenReturn(expirer); - when(rmContext.getRMApplicationHistoryWriter()).thenReturn(writer); when(rmContext.getSystemMetricsPublisher()).thenReturn(publisher); RMContainer rmContainer = new RMContainerImpl(container, appAttemptId, nodeId, "user", rmContext); @@ -203,7 +196,6 @@ public void testExpireWhileRunning() { assertEquals(resource, rmContainer.getAllocatedResource()); assertEquals(nodeId, rmContainer.getAllocatedNode()); assertEquals(priority, rmContainer.getAllocatedPriority()); - verify(writer).containerStarted(any(RMContainer.class)); 
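// --- editor's aside (illustrative sketch, not part of the patch) ---
// With the RMApplicationHistoryWriter mock removed above, container lifecycle events in
// TestRMContainerImpl are asserted only against the mocked SystemMetricsPublisher. The
// hypothetical helper below restates the surviving setup, using only Mockito calls that
// already appear in this file's hunks; static imports of org.mockito.Mockito.* and
// org.mockito.Matchers.* are assumed, as in the test itself.
private static RMContext mockRMContextWithPublisher(DrainDispatcher drainDispatcher,
    ContainerAllocationExpirer expirer, SystemMetricsPublisher publisher) {
  RMContext rmContext = mock(RMContext.class);
  when(rmContext.getDispatcher()).thenReturn(drainDispatcher);
  when(rmContext.getContainerAllocationExpirer()).thenReturn(expirer);
  // The getRMApplicationHistoryWriter() stub removed by this patch is intentionally absent.
  when(rmContext.getSystemMetricsPublisher()).thenReturn(publisher);
  return rmContext;
}
// Typical verifications afterwards, exactly as kept by this patch:
//   verify(publisher).containerCreated(any(RMContainer.class), anyLong());
//   verify(publisher, never()).containerFinished(any(RMContainer.class), anyLong());
// --- end editor's aside ---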
verify(publisher).containerCreated(any(RMContainer.class), anyLong()); rmContainer.handle(new RMContainerEvent(containerId, @@ -232,7 +224,6 @@ public void testExpireWhileRunning() { containerStatus, RMContainerEventType.EXPIRE)); drainDispatcher.await(); assertEquals(RMContainerState.RUNNING, rmContainer.getState()); - verify(writer, never()).containerFinished(any(RMContainer.class)); verify(publisher, never()).containerFinished(any(RMContainer.class), anyLong()); } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestCapacityScheduler.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestCapacityScheduler.java index f7c098c..079d163 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestCapacityScheduler.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestCapacityScheduler.java @@ -394,7 +394,7 @@ public void testRefreshQueues() throws Exception { RMContextImpl rmContext = new RMContextImpl(null, null, null, null, null, null, new RMContainerTokenSecretManager(conf), new NMTokenSecretManagerInRM(conf), - new ClientToAMTokenSecretManagerInRM(), null); + new ClientToAMTokenSecretManagerInRM()); setupQueueConfiguration(conf); cs.setConf(new YarnConfiguration()); cs.setRMContext(resourceManager.getRMContext()); @@ -500,7 +500,7 @@ public void testParseQueue() throws IOException { cs.reinitialize(conf, new RMContextImpl(null, null, null, null, null, null, new RMContainerTokenSecretManager(conf), new NMTokenSecretManagerInRM(conf), - new ClientToAMTokenSecretManagerInRM(), null)); + new ClientToAMTokenSecretManagerInRM())); } @Test @@ -516,7 +516,7 @@ public void testReconnectedNode() throws Exception { cs.reinitialize(csConf, new RMContextImpl(null, null, null, null, null, null, new RMContainerTokenSecretManager(csConf), new NMTokenSecretManagerInRM(csConf), - new ClientToAMTokenSecretManagerInRM(), null)); + new ClientToAMTokenSecretManagerInRM())); RMNode n1 = MockNodes.newNodeInfo(0, MockNodes.newResource(4 * GB), 1); RMNode n2 = MockNodes.newNodeInfo(0, MockNodes.newResource(2 * GB), 2); @@ -547,7 +547,7 @@ public void testRefreshQueuesWithNewQueue() throws Exception { cs.reinitialize(conf, new RMContextImpl(null, null, null, null, null, null, new RMContainerTokenSecretManager(conf), new NMTokenSecretManagerInRM(conf), - new ClientToAMTokenSecretManagerInRM(), null)); + new ClientToAMTokenSecretManagerInRM())); checkQueueCapacities(cs, A_CAPACITY, B_CAPACITY); // Add a new queue b4 @@ -965,7 +965,7 @@ public void testNumClusterNodes() throws Exception { RMContextImpl rmContext = new RMContextImpl(null, null, null, null, null, null, new RMContainerTokenSecretManager(conf), new NMTokenSecretManagerInRM(conf), - new ClientToAMTokenSecretManagerInRM(), null); + new ClientToAMTokenSecretManagerInRM()); cs.setRMContext(rmContext); CapacitySchedulerConfiguration csConf = new CapacitySchedulerConfiguration(); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestChildQueueOrder.java 
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestChildQueueOrder.java index fdb9028..7cb90c0 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestChildQueueOrder.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestChildQueueOrder.java @@ -43,7 +43,6 @@ import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.event.DrainDispatcher; import org.apache.hadoop.yarn.server.resourcemanager.RMContext; -import org.apache.hadoop.yarn.server.resourcemanager.ahs.RMApplicationHistoryWriter; import org.apache.hadoop.yarn.server.resourcemanager.metrics.SystemMetricsPublisher; import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.ContainerAllocationExpirer; import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainer; @@ -249,12 +248,10 @@ public void testSortedQueues() throws Exception { ContainerAllocationExpirer expirer = mock(ContainerAllocationExpirer.class); DrainDispatcher drainDispatcher = new DrainDispatcher(); - RMApplicationHistoryWriter writer = mock(RMApplicationHistoryWriter.class); SystemMetricsPublisher publisher = mock(SystemMetricsPublisher.class); RMContext rmContext = mock(RMContext.class); when(rmContext.getContainerAllocationExpirer()).thenReturn(expirer); when(rmContext.getDispatcher()).thenReturn(drainDispatcher); - when(rmContext.getRMApplicationHistoryWriter()).thenReturn(writer); when(rmContext.getSystemMetricsPublisher()).thenReturn(publisher); ApplicationAttemptId appAttemptId = BuilderUtils.newApplicationAttemptId( app_0.getApplicationId(), 1); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestQueueMappings.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestQueueMappings.java index f573f43..587890a4 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestQueueMappings.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestQueueMappings.java @@ -82,7 +82,7 @@ public void testQueueMapping() throws Exception { RMContextImpl rmContext = new RMContextImpl(null, null, null, null, null, null, new RMContainerTokenSecretManager(conf), new NMTokenSecretManagerInRM(conf), - new ClientToAMTokenSecretManagerInRM(), null); + new ClientToAMTokenSecretManagerInRM()); cs.setConf(conf); cs.setRMContext(rmContext); cs.init(conf); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestQueueParsing.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestQueueParsing.java index a3b990c..d5462a1 100644 --- 
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestQueueParsing.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestQueueParsing.java @@ -46,7 +46,7 @@ public void testQueueParsing() throws Exception { RMContextImpl rmContext = new RMContextImpl(null, null, null, null, null, null, new RMContainerTokenSecretManager(conf), new NMTokenSecretManagerInRM(conf), - new ClientToAMTokenSecretManagerInRM(), null); + new ClientToAMTokenSecretManagerInRM()); capacityScheduler.setConf(conf); capacityScheduler.setRMContext(rmContext); capacityScheduler.init(conf); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestReservations.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestReservations.java index 0f8290e..37a3463 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestReservations.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestReservations.java @@ -49,7 +49,6 @@ import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.server.resourcemanager.RMContext; -import org.apache.hadoop.yarn.server.resourcemanager.ahs.RMApplicationHistoryWriter; import org.apache.hadoop.yarn.server.resourcemanager.metrics.SystemMetricsPublisher; import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.ContainerAllocationExpirer; import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainer; @@ -629,7 +628,6 @@ public void testGetAppToUnreserve() throws Exception { Priority priorityMap = TestUtils.createMockPriority(5); Resource capability = Resources.createResource(2*GB, 0); - RMApplicationHistoryWriter writer = mock(RMApplicationHistoryWriter.class); SystemMetricsPublisher publisher = mock(SystemMetricsPublisher.class); RMContext rmContext = mock(RMContext.class); ContainerAllocationExpirer expirer = @@ -637,7 +635,6 @@ public void testGetAppToUnreserve() throws Exception { DrainDispatcher drainDispatcher = new DrainDispatcher(); when(rmContext.getContainerAllocationExpirer()).thenReturn(expirer); when(rmContext.getDispatcher()).thenReturn(drainDispatcher); - when(rmContext.getRMApplicationHistoryWriter()).thenReturn(writer); when(rmContext.getSystemMetricsPublisher()).thenReturn(publisher); ApplicationAttemptId appAttemptId = BuilderUtils.newApplicationAttemptId( app_0.getApplicationId(), 1); @@ -693,7 +690,6 @@ public void testFindNodeToUnreserve() throws Exception { Priority priorityMap = TestUtils.createMockPriority(5); Resource capability = Resources.createResource(2 * GB, 0); - RMApplicationHistoryWriter writer = mock(RMApplicationHistoryWriter.class); SystemMetricsPublisher publisher = mock(SystemMetricsPublisher.class); RMContext rmContext = mock(RMContext.class); ContainerAllocationExpirer expirer = @@ -701,7 +697,6 @@ public void testFindNodeToUnreserve() 
throws Exception { DrainDispatcher drainDispatcher = new DrainDispatcher(); when(rmContext.getContainerAllocationExpirer()).thenReturn(expirer); when(rmContext.getDispatcher()).thenReturn(drainDispatcher); - when(rmContext.getRMApplicationHistoryWriter()).thenReturn(writer); when(rmContext.getSystemMetricsPublisher()).thenReturn(publisher); ApplicationAttemptId appAttemptId = BuilderUtils.newApplicationAttemptId( app_0.getApplicationId(), 1); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestUtils.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestUtils.java index 9cb902d..2b1c128 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestUtils.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestUtils.java @@ -41,7 +41,6 @@ import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.server.resourcemanager.RMContext; import org.apache.hadoop.yarn.server.resourcemanager.RMContextImpl; -import org.apache.hadoop.yarn.server.resourcemanager.ahs.RMApplicationHistoryWriter; import org.apache.hadoop.yarn.server.resourcemanager.metrics.SystemMetricsPublisher; import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.ContainerAllocationExpirer; import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNode; @@ -86,13 +85,12 @@ public EventHandler getEventHandler() { new ContainerAllocationExpirer(nullDispatcher); Configuration conf = new Configuration(); - RMApplicationHistoryWriter writer = mock(RMApplicationHistoryWriter.class); RMContextImpl rmContext = new RMContextImpl(nullDispatcher, cae, null, null, null, new AMRMTokenSecretManager(conf, null), new RMContainerTokenSecretManager(conf), new NMTokenSecretManagerInRM(conf), - new ClientToAMTokenSecretManagerInRM(), writer); + new ClientToAMTokenSecretManagerInRM()); rmContext.setSystemMetricsPublisher(mock(SystemMetricsPublisher.class)); return rmContext; } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fifo/TestFifoScheduler.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fifo/TestFifoScheduler.java index b4c4c10..2794474 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fifo/TestFifoScheduler.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fifo/TestFifoScheduler.java @@ -55,7 +55,6 @@ import org.apache.hadoop.yarn.server.resourcemanager.RMContextImpl; import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager; import org.apache.hadoop.yarn.server.resourcemanager.Task; -import org.apache.hadoop.yarn.server.resourcemanager.ahs.RMApplicationHistoryWriter; import org.apache.hadoop.yarn.server.resourcemanager.metrics.SystemMetricsPublisher; 
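// --- editor's aside (illustrative, not part of the patch) ---
// The recurring edit across these scheduler and web-app tests is mechanical: the
// RMContextImpl constructor no longer takes a trailing RMApplicationHistoryWriter, so
// every call site drops its final argument, whether it was a mocked writer or an
// explicit null. Shape of the change, taken from the argument lists in the hunks below:
//
//   // before: ten arguments, trailing writer (or null)
//   new RMContextImpl(dispatcher, null, null, null, null,
//       null, containerTokenSecretManager, nmTokenSecretManager, null, writer);
//
//   // after: nine arguments, no history writer
//   new RMContextImpl(dispatcher, null, null, null, null,
//       null, containerTokenSecretManager, nmTokenSecretManager, null);
// --- end editor's aside ---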
import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNode; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.AbstractYarnScheduler; @@ -143,9 +142,8 @@ public void testFifoSchedulerCapacityWhenNoNMs() { @Test(timeout=5000) public void testAppAttemptMetrics() throws Exception { AsyncDispatcher dispatcher = new InlineDispatcher(); - RMApplicationHistoryWriter writer = mock(RMApplicationHistoryWriter.class); RMContext rmContext = new RMContextImpl(dispatcher, null, - null, null, null, null, null, null, null, writer); + null, null, null, null, null, null, null); ((RMContextImpl) rmContext).setSystemMetricsPublisher( mock(SystemMetricsPublisher.class)); @@ -188,9 +186,8 @@ public void testNodeLocalAssignment() throws Exception { NMTokenSecretManagerInRM nmTokenSecretManager = new NMTokenSecretManagerInRM(conf); nmTokenSecretManager.rollMasterKey(); - RMApplicationHistoryWriter writer = mock(RMApplicationHistoryWriter.class); RMContext rmContext = new RMContextImpl(dispatcher, null, null, null, null, - null, containerTokenSecretManager, nmTokenSecretManager, null, writer); + null, containerTokenSecretManager, nmTokenSecretManager, null); ((RMContextImpl) rmContext).setSystemMetricsPublisher( mock(SystemMetricsPublisher.class)); @@ -259,9 +256,8 @@ public void testUpdateResourceOnNode() throws Exception { NMTokenSecretManagerInRM nmTokenSecretManager = new NMTokenSecretManagerInRM(conf); nmTokenSecretManager.rollMasterKey(); - RMApplicationHistoryWriter writer = mock(RMApplicationHistoryWriter.class); RMContext rmContext = new RMContextImpl(dispatcher, null, null, null, null, - null, containerTokenSecretManager, nmTokenSecretManager, null, writer); + null, containerTokenSecretManager, nmTokenSecretManager, null); ((RMContextImpl) rmContext).setSystemMetricsPublisher( mock(SystemMetricsPublisher.class)); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebApp.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebApp.java index 0df7c0d..2202571 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebApp.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebApp.java @@ -163,7 +163,7 @@ public static RMContext mockRMContext(int numApps, int racks, int numNodes, deactivatedNodesMap.put(node.getHostName(), node); } return new RMContextImpl(null, null, null, null, - null, null, null, null, null, null) { + null, null, null, null, null) { @Override public ConcurrentMap getRMApps() { return applicationsMaps; @@ -206,7 +206,7 @@ public static CapacityScheduler mockCapacityScheduler() throws IOException { cs.setRMContext(new RMContextImpl(null, null, null, null, null, null, new RMContainerTokenSecretManager(conf), new NMTokenSecretManagerInRM(conf), - new ClientToAMTokenSecretManagerInRM(), null)); + new ClientToAMTokenSecretManagerInRM())); cs.init(conf); return cs; } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebAppFairScheduler.java 
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebAppFairScheduler.java index f07cb8d..a4f3396 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebAppFairScheduler.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebAppFairScheduler.java @@ -150,7 +150,7 @@ public YarnApplicationState createApplicationState() { } return new RMContextImpl(null, null, null, null, - null, null, null, null, null, null) { + null, null, null, null, null) { @Override public ConcurrentMap getRMApps() { return applicationsMaps; @@ -173,7 +173,7 @@ private static FairScheduler mockFairScheduler() throws IOException { fs.setRMContext(new RMContextImpl(null, null, null, null, null, null, new RMContainerTokenSecretManager(conf), new NMTokenSecretManagerInRM(conf), - new ClientToAMTokenSecretManagerInRM(), null)); + new ClientToAMTokenSecretManagerInRM())); fs.init(conf); return fs; } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/MiniYARNCluster.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/MiniYARNCluster.java index b5b7a43..732ce3f 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/MiniYARNCluster.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/MiniYARNCluster.java @@ -55,8 +55,6 @@ import org.apache.hadoop.yarn.server.api.protocolrecords.RegisterNodeManagerRequest; import org.apache.hadoop.yarn.server.api.protocolrecords.RegisterNodeManagerResponse; import org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryServer; -import org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryStore; -import org.apache.hadoop.yarn.server.applicationhistoryservice.MemoryApplicationHistoryStore; import org.apache.hadoop.yarn.server.applicationhistoryservice.webapp.AHSWebApp; import org.apache.hadoop.yarn.server.nodemanager.Context; import org.apache.hadoop.yarn.server.nodemanager.NodeHealthCheckerService; @@ -678,8 +676,6 @@ public ApplicationHistoryServerWrapper() { protected synchronized void serviceInit(Configuration conf) throws Exception { appHistoryServer = new ApplicationHistoryServer(); - conf.setClass(YarnConfiguration.APPLICATION_HISTORY_STORE, - MemoryApplicationHistoryStore.class, ApplicationHistoryStore.class); conf.setClass(YarnConfiguration.TIMELINE_SERVICE_STORE, MemoryTimelineStore.class, TimelineStore.class); appHistoryServer.init(conf);
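Editor's note: the MiniYARNCluster hunk above shows the test-cluster side of this removal. ApplicationHistoryServerWrapper no longer wires MemoryApplicationHistoryStore in through YarnConfiguration.APPLICATION_HISTORY_STORE; it only backs the history server with an in-memory timeline store. A minimal sketch of the resulting serviceInit body, restating just the calls visible in this hunk (the rest of the method is elided here, as it is in the excerpt):

  @Override
  protected synchronized void serviceInit(Configuration conf) throws Exception {
    appHistoryServer = new ApplicationHistoryServer();
    // The APPLICATION_HISTORY_STORE / MemoryApplicationHistoryStore wiring removed by
    // this patch is gone; only the timeline store is configured in memory for tests.
    conf.setClass(YarnConfiguration.TIMELINE_SERVICE_STORE,
        MemoryTimelineStore.class, TimelineStore.class);
    appHistoryServer.init(conf);
    // ... remainder of the original method unchanged and not shown in this excerpt ...
  }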