diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/apptimeline/ATSEntities.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/apptimeline/ATSEntities.java
deleted file mode 100644
index ed02cac..0000000
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/apptimeline/ATSEntities.java
+++ /dev/null
@@ -1,88 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.api.records.apptimeline;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import javax.xml.bind.annotation.XmlAccessType;
-import javax.xml.bind.annotation.XmlAccessorType;
-import javax.xml.bind.annotation.XmlElement;
-import javax.xml.bind.annotation.XmlRootElement;
-
-import org.apache.hadoop.classification.InterfaceAudience.Public;
-import org.apache.hadoop.classification.InterfaceStability.Unstable;
-
-/**
- * The class that hosts a list of application timeline entities.
- */
-@XmlRootElement(name = "entities")
-@XmlAccessorType(XmlAccessType.NONE)
-@Public
-@Unstable
-public class ATSEntities {
-
- private List<ATSEntity> entities =
- new ArrayList<ATSEntity>();
-
- public ATSEntities() {
-
- }
-
- /**
- * Get a list of entities
- *
- * @return a list of entities
- */
- @XmlElement(name = "entities")
- public List<ATSEntity> getEntities() {
- return entities;
- }
-
- /**
- * Add a single entity into the existing entity list
- *
- * @param entity
- * a single entity
- */
- public void addEntity(ATSEntity entity) {
- entities.add(entity);
- }
-
- /**
- * All a list of entities into the existing entity list
- *
- * @param entities
- * a list of entities
- */
- public void addEntities(List<ATSEntity> entities) {
- this.entities.addAll(entities);
- }
-
- /**
- * Set the entity list to the given list of entities
- *
- * @param entities
- * a list of entities
- */
- public void setEntities(List<ATSEntity> entities) {
- this.entities = entities;
- }
-
-}
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/apptimeline/ATSEntity.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/apptimeline/ATSEntity.java
deleted file mode 100644
index 709c795..0000000
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/apptimeline/ATSEntity.java
+++ /dev/null
@@ -1,401 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.api.records.apptimeline;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-
-import javax.xml.bind.annotation.XmlAccessType;
-import javax.xml.bind.annotation.XmlAccessorType;
-import javax.xml.bind.annotation.XmlElement;
-import javax.xml.bind.annotation.XmlRootElement;
-
-import org.apache.hadoop.classification.InterfaceAudience.Public;
-import org.apache.hadoop.classification.InterfaceStability.Unstable;
-
-/**
- *
- * The class that contains the the meta information of some conceptual entity of
- * an application and its related events. The entity can be an application, an
- * application attempt, a container or whatever the user-defined object.
- *
- *
- *
- * Primary filters will be used to index the entities in
- * ApplicationTimelineStore, such that users should carefully
- * choose the information they want to store as the primary filters. The
- * remaining can be stored as other information.
- *
- */
-@XmlRootElement(name = "entity")
-@XmlAccessorType(XmlAccessType.NONE)
-@Public
-@Unstable
-public class ATSEntity implements Comparable<ATSEntity> {
-
- private String entityType;
- private String entityId;
- private Long startTime;
- private List<ATSEvent> events = new ArrayList<ATSEvent>();
- private Map<String, List<String>> relatedEntities =
- new HashMap<String, List<String>>();
- private Map<String, Object> primaryFilters =
- new HashMap<String, Object>();
- private Map<String, Object> otherInfo =
- new HashMap<String, Object>();
-
- public ATSEntity() {
-
- }
-
- /**
- * Get the entity type
- *
- * @return the entity type
- */
- @XmlElement(name = "entitytype")
- public String getEntityType() {
- return entityType;
- }
-
- /**
- * Set the entity type
- *
- * @param entityType
- * the entity type
- */
- public void setEntityType(String entityType) {
- this.entityType = entityType;
- }
-
- /**
- * Get the entity Id
- *
- * @return the entity Id
- */
- @XmlElement(name = "entity")
- public String getEntityId() {
- return entityId;
- }
-
- /**
- * Set the entity Id
- *
- * @param entityId
- * the entity Id
- */
- public void setEntityId(String entityId) {
- this.entityId = entityId;
- }
-
- /**
- * Get the start time of the entity
- *
- * @return the start time of the entity
- */
- @XmlElement(name = "starttime")
- public Long getStartTime() {
- return startTime;
- }
-
- /**
- * Set the start time of the entity
- *
- * @param startTime
- * the start time of the entity
- */
- public void setStartTime(Long startTime) {
- this.startTime = startTime;
- }
-
- /**
- * Get a list of events related to the entity
- *
- * @return a list of events related to the entity
- */
- @XmlElement(name = "events")
- public List<ATSEvent> getEvents() {
- return events;
- }
-
- /**
- * Add a single event related to the entity to the existing event list
- *
- * @param event
- * a single event related to the entity
- */
- public void addEvent(ATSEvent event) {
- events.add(event);
- }
-
- /**
- * Add a list of events related to the entity to the existing event list
- *
- * @param events
- * a list of events related to the entity
- */
- public void addEvents(List<ATSEvent> events) {
- this.events.addAll(events);
- }
-
- /**
- * Set the event list to the given list of events related to the entity
- *
- * @param events
- * events a list of events related to the entity
- */
- public void setEvents(List<ATSEvent> events) {
- this.events = events;
- }
-
- /**
- * Get the related entities
- *
- * @return the related entities
- */
- @XmlElement(name = "relatedentities")
- public Map<String, List<String>> getRelatedEntities() {
- return relatedEntities;
- }
-
- /**
- * Add an entity to the existing related entity map
- *
- * @param entityType
- * the entity type
- * @param entityId
- * the entity Id
- */
- public void addRelatedEntity(String entityType, String entityId) {
- List<String> thisRelatedEntity = relatedEntities.get(entityType);
- if (thisRelatedEntity == null) {
- thisRelatedEntity = new ArrayList<String>();
- relatedEntities.put(entityType, thisRelatedEntity);
- }
- thisRelatedEntity.add(entityId);
- }
-
- /**
- * Add a map of related entities to the existing related entity map
- *
- * @param relatedEntities
- * a map of related entities
- */
- public void addRelatedEntities(Map<String, List<String>> relatedEntities) {
- for (Entry<String, List<String>> relatedEntity :
- relatedEntities.entrySet()) {
- List<String> thisRelatedEntity =
- this.relatedEntities.get(relatedEntity.getKey());
- if (thisRelatedEntity == null) {
- this.relatedEntities.put(
- relatedEntity.getKey(), relatedEntity.getValue());
- } else {
- thisRelatedEntity.addAll(relatedEntity.getValue());
- }
- }
- }
-
- /**
- * Set the related entity map to the given map of related entities
- *
- * @param relatedEntities
- * a map of related entities
- */
- public void setRelatedEntities(
- Map<String, List<String>> relatedEntities) {
- this.relatedEntities = relatedEntities;
- }
-
- /**
- * Get the primary filters
- *
- * @return the primary filters
- */
- @XmlElement(name = "primaryfilters")
- public Map<String, Object> getPrimaryFilters() {
- return primaryFilters;
- }
-
- /**
- * Add a single piece of primary filter to the existing primary filter map
- *
- * @param key
- * the primary filter key
- * @param value
- * the primary filter value
- */
- public void addPrimaryFilter(String key, Object value) {
- primaryFilters.put(key, value);
- }
-
- /**
- * Add a map of primary filters to the existing primary filter map
- *
- * @param primaryFilters
- * a map of primary filters
- */
- public void addPrimaryFilters(Map<String, Object> primaryFilters) {
- this.primaryFilters.putAll(primaryFilters);
- }
-
- /**
- * Set the primary filter map to the given map of primary filters
- *
- * @param primaryFilters
- * a map of primary filters
- */
- public void setPrimaryFilters(Map<String, Object> primaryFilters) {
- this.primaryFilters = primaryFilters;
- }
-
- /**
- * Get the other information of the entity
- *
- * @return the other information of the entity
- */
- @XmlElement(name = "otherinfo")
- public Map<String, Object> getOtherInfo() {
- return otherInfo;
- }
-
- /**
- * Add one piece of other information of the entity to the existing other info
- * map
- *
- * @param key
- * the other information key
- * @param value
- * the other information value
- */
- public void addOtherInfo(String key, Object value) {
- this.otherInfo.put(key, value);
- }
-
- /**
- * Add a map of other information of the entity to the existing other info map
- *
- * @param otherInfo
- * a map of other information
- */
- public void addOtherInfo(Map<String, Object> otherInfo) {
- this.otherInfo.putAll(otherInfo);
- }
-
- /**
- * Set the other info map to the given map of other information
- *
- * @param otherInfo
- * a map of other information
- */
- public void setOtherInfo(Map<String, Object> otherInfo) {
- this.otherInfo = otherInfo;
- }
-
- @Override
- public int hashCode() {
- // generated by eclipse
- final int prime = 31;
- int result = 1;
- result = prime * result + ((entityId == null) ? 0 : entityId.hashCode());
- result =
- prime * result + ((entityType == null) ? 0 : entityType.hashCode());
- result = prime * result + ((events == null) ? 0 : events.hashCode());
- result = prime * result + ((otherInfo == null) ? 0 : otherInfo.hashCode());
- result =
- prime * result
- + ((primaryFilters == null) ? 0 : primaryFilters.hashCode());
- result =
- prime * result
- + ((relatedEntities == null) ? 0 : relatedEntities.hashCode());
- result = prime * result + ((startTime == null) ? 0 : startTime.hashCode());
- return result;
- }
-
- @Override
- public boolean equals(Object obj) {
- // generated by eclipse
- if (this == obj)
- return true;
- if (obj == null)
- return false;
- if (getClass() != obj.getClass())
- return false;
- ATSEntity other = (ATSEntity) obj;
- if (entityId == null) {
- if (other.entityId != null)
- return false;
- } else if (!entityId.equals(other.entityId))
- return false;
- if (entityType == null) {
- if (other.entityType != null)
- return false;
- } else if (!entityType.equals(other.entityType))
- return false;
- if (events == null) {
- if (other.events != null)
- return false;
- } else if (!events.equals(other.events))
- return false;
- if (otherInfo == null) {
- if (other.otherInfo != null)
- return false;
- } else if (!otherInfo.equals(other.otherInfo))
- return false;
- if (primaryFilters == null) {
- if (other.primaryFilters != null)
- return false;
- } else if (!primaryFilters.equals(other.primaryFilters))
- return false;
- if (relatedEntities == null) {
- if (other.relatedEntities != null)
- return false;
- } else if (!relatedEntities.equals(other.relatedEntities))
- return false;
- if (startTime == null) {
- if (other.startTime != null)
- return false;
- } else if (!startTime.equals(other.startTime))
- return false;
- return true;
- }
-
- @Override
- public int compareTo(ATSEntity other) {
- int comparison = entityType.compareTo(other.entityType);
- if (comparison == 0) {
- long thisStartTime =
- startTime == null ? Long.MIN_VALUE : startTime;
- long otherStartTime =
- other.startTime == null ? Long.MIN_VALUE : other.startTime;
- if (thisStartTime > otherStartTime) {
- return -1;
- } else if (thisStartTime < otherStartTime) {
- return 1;
- } else {
- return entityId.compareTo(other.entityId);
- }
- } else {
- return comparison;
- }
- }
-
-}
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/apptimeline/ATSEvent.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/apptimeline/ATSEvent.java
deleted file mode 100644
index 27bac16..0000000
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/apptimeline/ATSEvent.java
+++ /dev/null
@@ -1,172 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.api.records.apptimeline;
-
-import java.util.HashMap;
-import java.util.Map;
-
-import javax.xml.bind.annotation.XmlAccessType;
-import javax.xml.bind.annotation.XmlAccessorType;
-import javax.xml.bind.annotation.XmlElement;
-import javax.xml.bind.annotation.XmlRootElement;
-
-import org.apache.hadoop.classification.InterfaceAudience.Public;
-import org.apache.hadoop.classification.InterfaceStability.Unstable;
-
-/**
- * The class that contains the information of an event that is related to some
- * conceptual entity of an application. Users are free to define what the event
- * means, such as starting an application, getting allocated a container and
- * etc.
- */
-@XmlRootElement(name = "event")
-@XmlAccessorType(XmlAccessType.NONE)
-@Public
-@Unstable
-public class ATSEvent implements Comparable<ATSEvent> {
-
- private long timestamp;
- private String eventType;
- private Map<String, Object> eventInfo = new HashMap<String, Object>();
-
- public ATSEvent() {
- }
-
- /**
- * Get the timestamp of the event
- *
- * @return the timestamp of the event
- */
- @XmlElement(name = "timestamp")
- public long getTimestamp() {
- return timestamp;
- }
-
- /**
- * Set the timestamp of the event
- *
- * @param timestamp
- * the timestamp of the event
- */
- public void setTimestamp(long timestamp) {
- this.timestamp = timestamp;
- }
-
- /**
- * Get the event type
- *
- * @return the event type
- */
- @XmlElement(name = "eventtype")
- public String getEventType() {
- return eventType;
- }
-
- /**
- * Set the event type
- *
- * @param eventType
- * the event type
- */
- public void setEventType(String eventType) {
- this.eventType = eventType;
- }
-
- /**
- * Set the information of the event
- *
- * @return the information of the event
- */
- @XmlElement(name = "eventinfo")
- public Map<String, Object> getEventInfo() {
- return eventInfo;
- }
-
- /**
- * Add one piece of the information of the event to the existing information
- * map
- *
- * @param key
- * the information key
- * @param value
- * the information value
- */
- public void addEventInfo(String key, Object value) {
- this.eventInfo.put(key, value);
- }
-
- /**
- * Add a map of the information of the event to the existing information map
- *
- * @param eventInfo
- * a map of of the information of the event
- */
- public void addEventInfo(Map<String, Object> eventInfo) {
- this.eventInfo.putAll(eventInfo);
- }
-
- /**
- * Set the information map to the given map of the information of the event
- *
- * @param eventInfo
- * a map of of the information of the event
- */
- public void setEventInfo(Map<String, Object> eventInfo) {
- this.eventInfo = eventInfo;
- }
-
- @Override
- public int compareTo(ATSEvent other) {
- if (timestamp > other.timestamp) {
- return -1;
- } else if (timestamp < other.timestamp) {
- return 1;
- } else {
- return eventType.compareTo(other.eventType);
- }
- }
-
- @Override
- public boolean equals(Object o) {
- if (this == o)
- return true;
- if (o == null || getClass() != o.getClass())
- return false;
-
- ATSEvent atsEvent = (ATSEvent) o;
-
- if (timestamp != atsEvent.timestamp)
- return false;
- if (!eventType.equals(atsEvent.eventType))
- return false;
- if (eventInfo != null ? !eventInfo.equals(atsEvent.eventInfo) :
- atsEvent.eventInfo != null)
- return false;
-
- return true;
- }
-
- @Override
- public int hashCode() {
- int result = (int) (timestamp ^ (timestamp >>> 32));
- result = 31 * result + eventType.hashCode();
- result = 31 * result + (eventInfo != null ? eventInfo.hashCode() : 0);
- return result;
- }
-}
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/apptimeline/ATSEvents.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/apptimeline/ATSEvents.java
deleted file mode 100644
index a08537d..0000000
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/apptimeline/ATSEvents.java
+++ /dev/null
@@ -1,189 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.api.records.apptimeline;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import javax.xml.bind.annotation.XmlAccessType;
-import javax.xml.bind.annotation.XmlAccessorType;
-import javax.xml.bind.annotation.XmlElement;
-import javax.xml.bind.annotation.XmlRootElement;
-
-import org.apache.hadoop.classification.InterfaceAudience.Public;
-import org.apache.hadoop.classification.InterfaceStability.Unstable;
-
-/**
- * The class that hosts a list of events, which are categorized according to
- * their related entities.
- */
-@XmlRootElement(name = "events")
-@XmlAccessorType(XmlAccessType.NONE)
-@Public
-@Unstable
-public class ATSEvents {
-
- private List<ATSEventsOfOneEntity> allEvents =
- new ArrayList<ATSEventsOfOneEntity>();
-
- public ATSEvents() {
-
- }
-
- /**
- * Get a list of {@link ATSEventsOfOneEntity} instances
- *
- * @return a list of {@link ATSEventsOfOneEntity} instances
- */
- @XmlElement(name = "events")
- public List<ATSEventsOfOneEntity> getAllEvents() {
- return allEvents;
- }
-
- /**
- * Add a single {@link ATSEventsOfOneEntity} instance into the existing list
- *
- * @param eventsOfOneEntity
- * a single {@link ATSEventsOfOneEntity} instance
- */
- public void addEvent(ATSEventsOfOneEntity eventsOfOneEntity) {
- allEvents.add(eventsOfOneEntity);
- }
-
- /**
- * Add a list of {@link ATSEventsOfOneEntity} instances into the existing list
- *
- * @param allEvents
- * a list of {@link ATSEventsOfOneEntity} instances
- */
- public void addEvents(List<ATSEventsOfOneEntity> allEvents) {
- this.allEvents.addAll(allEvents);
- }
-
- /**
- * Set the list to the given list of {@link ATSEventsOfOneEntity} instances
- *
- * @param allEvents
- * a list of {@link ATSEventsOfOneEntity} instances
- */
- public void setEvents(List<ATSEventsOfOneEntity> allEvents) {
- this.allEvents.clear();
- this.allEvents.addAll(allEvents);
- }
-
- /**
- * The class that hosts a list of events that are only related to one entity.
- */
- @XmlRootElement(name = "events")
- @XmlAccessorType(XmlAccessType.NONE)
- @Public
- @Unstable
- public static class ATSEventsOfOneEntity {
-
- private String entityId;
- private String entityType;
- private List<ATSEvent> events = new ArrayList<ATSEvent>();
-
- public ATSEventsOfOneEntity() {
-
- }
-
- /**
- * Get the entity Id
- *
- * @return the entity Id
- */
- @XmlElement(name = "entity")
- public String getEntityId() {
- return entityId;
- }
-
- /**
- * Set the entity Id
- *
- * @param entityId
- * the entity Id
- */
- public void setEntityId(String entityId) {
- this.entityId = entityId;
- }
-
- /**
- * Get the entity type
- *
- * @return the entity type
- */
- @XmlElement(name = "entitytype")
- public String getEntityType() {
- return entityType;
- }
-
- /**
- * Set the entity type
- *
- * @param entityType
- * the entity type
- */
- public void setEntityType(String entityType) {
- this.entityType = entityType;
- }
-
- /**
- * Get a list of events
- *
- * @return a list of events
- */
- @XmlElement(name = "events")
- public List<ATSEvent> getEvents() {
- return events;
- }
-
- /**
- * Add a single event to the existing event list
- *
- * @param event
- * a single event
- */
- public void addEvent(ATSEvent event) {
- events.add(event);
- }
-
- /**
- * Add a list of event to the existing event list
- *
- * @param events
- * a list of events
- */
- public void addEvents(List<ATSEvent> events) {
- this.events.addAll(events);
- }
-
- /**
- * Set the event list to the given list of events
- *
- * @param events
- * a list of events
- */
- public void setEvents(List<ATSEvent> events) {
- this.events = events;
- }
-
- }
-
-}
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/apptimeline/ATSPutErrors.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/apptimeline/ATSPutErrors.java
deleted file mode 100644
index d330eb4..0000000
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/apptimeline/ATSPutErrors.java
+++ /dev/null
@@ -1,175 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.yarn.api.records.apptimeline;
-
-import org.apache.hadoop.classification.InterfaceAudience.Public;
-import org.apache.hadoop.classification.InterfaceStability.Unstable;
-
-import javax.xml.bind.annotation.XmlAccessType;
-import javax.xml.bind.annotation.XmlAccessorType;
-import javax.xml.bind.annotation.XmlElement;
-import javax.xml.bind.annotation.XmlRootElement;
-import java.util.ArrayList;
-import java.util.List;
-
-/**
- * A class that holds a list of put errors. This is the response returned
- * when a list of {@link ATSEntity} objects is added to the application
- * timeline. If there are errors in storing individual entity objects,
- * they will be indicated in the list of errors.
- */
-@XmlRootElement(name = "errors")
-@XmlAccessorType(XmlAccessType.NONE)
-@Public
-@Unstable
-public class ATSPutErrors {
-
- private List<ATSPutError> errors = new ArrayList<ATSPutError>();
-
- public ATSPutErrors() {
-
- }
-
- /**
- * Get a list of {@link ATSPutError} instances
- *
- * @return a list of {@link ATSPutError} instances
- */
- @XmlElement(name = "errors")
- public List<ATSPutError> getErrors() {
- return errors;
- }
-
- /**
- * Add a single {@link ATSPutError} instance into the existing list
- *
- * @param error
- * a single {@link ATSPutError} instance
- */
- public void addError(ATSPutError error) {
- errors.add(error);
- }
-
- /**
- * Add a list of {@link ATSPutError} instances into the existing list
- *
- * @param errors
- * a list of {@link ATSPutError} instances
- */
- public void addErrors(List<ATSPutError> errors) {
- this.errors.addAll(errors);
- }
-
- /**
- * Set the list to the given list of {@link ATSPutError} instances
- *
- * @param errors
- * a list of {@link ATSPutError} instances
- */
- public void setErrors(List<ATSPutError> errors) {
- this.errors.clear();
- this.errors.addAll(errors);
- }
-
- /**
- * A class that holds the error code for one entity.
- */
- @XmlRootElement(name = "error")
- @XmlAccessorType(XmlAccessType.NONE)
- @Public
- @Unstable
- public static class ATSPutError {
- /**
- * Error code returned when no start time can be found when putting an
- * entity. This occurs when the entity does not already exist in the
- * store and it is put with no start time or events specified.
- */
- public static final int NO_START_TIME = 1;
- /**
- * Error code returned if an IOException is encountered when putting an
- * entity.
- */
- public static final int IO_EXCEPTION = 2;
-
- private String entityId;
- private String entityType;
- private int errorCode;
-
- /**
- * Get the entity Id
- *
- * @return the entity Id
- */
- @XmlElement(name = "entity")
- public String getEntityId() {
- return entityId;
- }
-
- /**
- * Set the entity Id
- *
- * @param entityId
- * the entity Id
- */
- public void setEntityId(String entityId) {
- this.entityId = entityId;
- }
-
- /**
- * Get the entity type
- *
- * @return the entity type
- */
- @XmlElement(name = "entitytype")
- public String getEntityType() {
- return entityType;
- }
-
- /**
- * Set the entity type
- *
- * @param entityType
- * the entity type
- */
- public void setEntityType(String entityType) {
- this.entityType = entityType;
- }
-
- /**
- * Get the error code
- *
- * @return an error code
- */
- @XmlElement(name = "errorcode")
- public int getErrorCode() {
- return errorCode;
- }
-
- /**
- * Set the error code to the given error code
- *
- * @param errorCode
- * an error code
- */
- public void setErrorCode(int errorCode) {
- this.errorCode = errorCode;
- }
-
- }
-
-}
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/apptimeline/package-info.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/apptimeline/package-info.java
deleted file mode 100644
index b57cad4..0000000
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/apptimeline/package-info.java
+++ /dev/null
@@ -1,21 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-@InterfaceAudience.Public
-package org.apache.hadoop.yarn.api.records.apptimeline;
-import org.apache.hadoop.classification.InterfaceAudience;
-
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timeline/Entities.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timeline/Entities.java
new file mode 100644
index 0000000..bddafe8
--- /dev/null
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timeline/Entities.java
@@ -0,0 +1,88 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.api.records.timeline;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlElement;
+import javax.xml.bind.annotation.XmlRootElement;
+
+import org.apache.hadoop.classification.InterfaceAudience.Public;
+import org.apache.hadoop.classification.InterfaceStability.Unstable;
+
+/**
+ * The class that hosts a list of timeline entities.
+ */
+@XmlRootElement(name = "entities")
+@XmlAccessorType(XmlAccessType.NONE)
+@Public
+@Unstable
+public class Entities {
+
+ private List<Entity> entities =
+ new ArrayList<Entity>();
+
+ public Entities() {
+
+ }
+
+ /**
+ * Get a list of entities
+ *
+ * @return a list of entities
+ */
+ @XmlElement(name = "entities")
+ public List<Entity> getEntities() {
+ return entities;
+ }
+
+ /**
+ * Add a single entity into the existing entity list
+ *
+ * @param entity
+ * a single entity
+ */
+ public void addEntity(Entity entity) {
+ entities.add(entity);
+ }
+
+ /**
+ * Add a list of entities into the existing entity list
+ *
+ * @param entities
+ * a list of entities
+ */
+ public void addEntities(List<Entity> entities) {
+ this.entities.addAll(entities);
+ }
+
+ /**
+ * Set the entity list to the given list of entities
+ *
+ * @param entities
+ * a list of entities
+ */
+ public void setEntities(List<Entity> entities) {
+ this.entities = entities;
+ }
+
+}
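The following is a minimal usage sketch, not part of the patch, showing how the renamed records are meant to compose: an Entity (from the companion Entity.java below) carrying primary filters, other info, and an Event (whose new file is not visible in this excerpt but whose setters appear in the test changes further down) is wrapped in the Entities container above. Class and literal names such as EntitiesExample, MY_APPLICATION, and app_0001 are made up for illustration.

import org.apache.hadoop.yarn.api.records.timeline.Entities;
import org.apache.hadoop.yarn.api.records.timeline.Entity;
import org.apache.hadoop.yarn.api.records.timeline.Event;

public class EntitiesExample {
  public static void main(String[] args) {
    Entity entity = new Entity();
    entity.setEntityType("MY_APPLICATION");           // user-defined entity type
    entity.setEntityId("app_0001");                   // user-defined entity id
    entity.setStartTime(System.currentTimeMillis());
    entity.addPrimaryFilter("user", "alice");         // indexed by the timeline store
    entity.addOtherInfo("queue", "default");          // stored but not indexed

    Event event = new Event();                        // assumed to mirror the old ATSEvent API
    event.setTimestamp(System.currentTimeMillis());
    event.setEventType("APP_STARTED");
    event.addEventInfo("node", "host-1");
    entity.addEvent(event);

    Entities entities = new Entities();               // the wrapper serialized as "entities"
    entities.addEntity(entity);
  }
}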
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timeline/Entity.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timeline/Entity.java
new file mode 100644
index 0000000..84667d1
--- /dev/null
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timeline/Entity.java
@@ -0,0 +1,418 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.api.records.timeline;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlElement;
+import javax.xml.bind.annotation.XmlRootElement;
+
+import org.apache.hadoop.classification.InterfaceAudience.Public;
+import org.apache.hadoop.classification.InterfaceStability.Unstable;
+
+/**
+ *
+ * The class that contains the meta information of some conceptual entity of
+ * an application and its related events. The entity can be an application, an
+ * application attempt, a container, or any other user-defined object.
+ *
+ *
+ *
+ * Primary filters will be used to index the entities in
+ * TimelineStore, so users should carefully
+ * choose the information they want to store as the primary filters. The
+ * remaining information can be stored as other information.
+ *
+ */
+@XmlRootElement(name = "entity")
+@XmlAccessorType(XmlAccessType.NONE)
+@Public
+@Unstable
+public class Entity implements Comparable<Entity> {
+
+ private String entityType;
+ private String entityId;
+ private Long startTime;
+ private List<Event> events = new ArrayList<Event>();
+ private Map<String, Set<String>> relatedEntities =
+ new HashMap<String, Set<String>>();
+ private Map<String, Set<Object>> primaryFilters =
+ new HashMap<String, Set<Object>>();
+ private Map<String, Object> otherInfo =
+ new HashMap<String, Object>();
+
+ public Entity() {
+
+ }
+
+ /**
+ * Get the entity type
+ *
+ * @return the entity type
+ */
+ @XmlElement(name = "entitytype")
+ public String getEntityType() {
+ return entityType;
+ }
+
+ /**
+ * Set the entity type
+ *
+ * @param entityType
+ * the entity type
+ */
+ public void setEntityType(String entityType) {
+ this.entityType = entityType;
+ }
+
+ /**
+ * Get the entity Id
+ *
+ * @return the entity Id
+ */
+ @XmlElement(name = "entity")
+ public String getEntityId() {
+ return entityId;
+ }
+
+ /**
+ * Set the entity Id
+ *
+ * @param entityId
+ * the entity Id
+ */
+ public void setEntityId(String entityId) {
+ this.entityId = entityId;
+ }
+
+ /**
+ * Get the start time of the entity
+ *
+ * @return the start time of the entity
+ */
+ @XmlElement(name = "starttime")
+ public Long getStartTime() {
+ return startTime;
+ }
+
+ /**
+ * Set the start time of the entity
+ *
+ * @param startTime
+ * the start time of the entity
+ */
+ public void setStartTime(Long startTime) {
+ this.startTime = startTime;
+ }
+
+ /**
+ * Get a list of events related to the entity
+ *
+ * @return a list of events related to the entity
+ */
+ @XmlElement(name = "events")
+ public List<Event> getEvents() {
+ return events;
+ }
+
+ /**
+ * Add a single event related to the entity to the existing event list
+ *
+ * @param event
+ * a single event related to the entity
+ */
+ public void addEvent(Event event) {
+ events.add(event);
+ }
+
+ /**
+ * Add a list of events related to the entity to the existing event list
+ *
+ * @param events
+ * a list of events related to the entity
+ */
+ public void addEvents(List<Event> events) {
+ this.events.addAll(events);
+ }
+
+ /**
+ * Set the event list to the given list of events related to the entity
+ *
+ * @param events
+ * a list of events related to the entity
+ */
+ public void setEvents(List<Event> events) {
+ this.events = events;
+ }
+
+ /**
+ * Get the related entities
+ *
+ * @return the related entities
+ */
+ @XmlElement(name = "relatedentities")
+ public Map<String, Set<String>> getRelatedEntities() {
+ return relatedEntities;
+ }
+
+ /**
+ * Add an entity to the existing related entity map
+ *
+ * @param entityType
+ * the entity type
+ * @param entityId
+ * the entity Id
+ */
+ public void addRelatedEntity(String entityType, String entityId) {
+ Set<String> thisRelatedEntity = relatedEntities.get(entityType);
+ if (thisRelatedEntity == null) {
+ thisRelatedEntity = new HashSet<String>();
+ relatedEntities.put(entityType, thisRelatedEntity);
+ }
+ thisRelatedEntity.add(entityId);
+ }
+
+ /**
+ * Add a map of related entities to the existing related entity map
+ *
+ * @param relatedEntities
+ * a map of related entities
+ */
+ public void addRelatedEntities(Map<String, Set<String>> relatedEntities) {
+ for (Entry<String, Set<String>> relatedEntity :
+ relatedEntities.entrySet()) {
+ Set<String> thisRelatedEntity =
+ this.relatedEntities.get(relatedEntity.getKey());
+ if (thisRelatedEntity == null) {
+ this.relatedEntities.put(
+ relatedEntity.getKey(), relatedEntity.getValue());
+ } else {
+ thisRelatedEntity.addAll(relatedEntity.getValue());
+ }
+ }
+ }
+
+ /**
+ * Set the related entity map to the given map of related entities
+ *
+ * @param relatedEntities
+ * a map of related entities
+ */
+ public void setRelatedEntities(
+ Map<String, Set<String>> relatedEntities) {
+ this.relatedEntities = relatedEntities;
+ }
+
+ /**
+ * Get the primary filters
+ *
+ * @return the primary filters
+ */
+ @XmlElement(name = "primaryfilters")
+ public Map<String, Set<Object>> getPrimaryFilters() {
+ return primaryFilters;
+ }
+
+ /**
+ * Add a single piece of primary filter to the existing primary filter map
+ *
+ * @param key
+ * the primary filter key
+ * @param value
+ * the primary filter value
+ */
+ public void addPrimaryFilter(String key, Object value) {
+ Set
*
* @param entities
- * the collection of {@link ATSEntity}
+ * the collection of {@link Entity}
* @return the error information if the post entities are not correctly stored
* @throws IOException
* @throws YarnException
*/
@Public
- public abstract ATSPutErrors postEntities(
- ATSEntity... entities) throws IOException, YarnException;
+ public abstract PutErrors postEntities(
+ Entity... entities) throws IOException, YarnException;
}
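Below is a hedged sketch of how a caller might use the renamed client API from the hunk above. TimelineClient.createTimelineClient() and the Service-style init/start/stop lifecycle are assumed from the surrounding YARN client code and are not part of this hunk; only postEntities and PutErrors.getErrors are taken directly from this patch.

import java.io.IOException;

import org.apache.hadoop.yarn.api.records.timeline.Entity;
import org.apache.hadoop.yarn.api.records.timeline.PutErrors;
import org.apache.hadoop.yarn.client.api.TimelineClient;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.YarnException;

public class PostEntitiesExample {
  public static void main(String[] args) throws IOException, YarnException {
    TimelineClient client = TimelineClient.createTimelineClient(); // assumed factory method
    client.init(new YarnConfiguration());
    client.start();
    try {
      Entity entity = new Entity();
      entity.setEntityType("MY_APPLICATION");
      entity.setEntityId("app_0001");
      entity.setStartTime(System.currentTimeMillis());
      // per-entity storage failures come back in PutErrors rather than as exceptions
      PutErrors errors = client.postEntities(entity);
      System.out.println("put errors: " + errors.getErrors().size());
    } finally {
      client.stop();
    }
  }
}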
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl.java
index 3269b8b..5dafe51 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl.java
@@ -29,9 +29,9 @@
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.yarn.api.records.apptimeline.ATSEntities;
-import org.apache.hadoop.yarn.api.records.apptimeline.ATSEntity;
-import org.apache.hadoop.yarn.api.records.apptimeline.ATSPutErrors;
+import org.apache.hadoop.yarn.api.records.timeline.Entities;
+import org.apache.hadoop.yarn.api.records.timeline.Entity;
+import org.apache.hadoop.yarn.api.records.timeline.PutErrors;
import org.apache.hadoop.yarn.client.api.TimelineClient;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.YarnException;
@@ -50,7 +50,7 @@
public class TimelineClientImpl extends TimelineClient {
private static final Log LOG = LogFactory.getLog(TimelineClientImpl.class);
- private static final String RESOURCE_URI_STR = "/ws/v1/apptimeline/";
+ private static final String RESOURCE_URI_STR = "/ws/v1/timeline/";
private static final Joiner JOINER = Joiner.on("");
private Client client;
@@ -79,9 +79,9 @@ protected void serviceInit(Configuration conf) throws Exception {
}
@Override
- public ATSPutErrors postEntities(
- ATSEntity... entities) throws IOException, YarnException {
- ATSEntities entitiesContainer = new ATSEntities();
+ public PutErrors postEntities(
+ Entity... entities) throws IOException, YarnException {
+ Entities entitiesContainer = new Entities();
entitiesContainer.addEntities(Arrays.asList(entities));
ClientResponse resp = doPostingEntities(entitiesContainer);
if (resp.getClientResponseStatus() != ClientResponse.Status.OK) {
@@ -95,12 +95,12 @@ public ATSPutErrors postEntities(
}
throw new YarnException(msg);
}
- return resp.getEntity(ATSPutErrors.class);
+ return resp.getEntity(PutErrors.class);
}
@Private
@VisibleForTesting
- public ClientResponse doPostingEntities(ATSEntities entities) {
+ public ClientResponse doPostingEntities(Entities entities) {
WebResource webResource = client.resource(resURI);
return webResource.accept(MediaType.APPLICATION_JSON)
.type(MediaType.APPLICATION_JSON)
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestTimelineClient.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestTimelineClient.java
index a3917a2..d12972e 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestTimelineClient.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestTimelineClient.java
@@ -25,10 +25,10 @@
import static org.mockito.Mockito.when;
import junit.framework.Assert;
-import org.apache.hadoop.yarn.api.records.apptimeline.ATSEntities;
-import org.apache.hadoop.yarn.api.records.apptimeline.ATSEntity;
-import org.apache.hadoop.yarn.api.records.apptimeline.ATSEvent;
-import org.apache.hadoop.yarn.api.records.apptimeline.ATSPutErrors;
+import org.apache.hadoop.yarn.api.records.timeline.Entities;
+import org.apache.hadoop.yarn.api.records.timeline.Entity;
+import org.apache.hadoop.yarn.api.records.timeline.Event;
+import org.apache.hadoop.yarn.api.records.timeline.PutErrors;
import org.apache.hadoop.yarn.client.api.TimelineClient;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.YarnException;
@@ -58,7 +58,7 @@ public void tearDown() {
public void testPostEntities() throws Exception {
mockClientResponse(ClientResponse.Status.OK, false);
try {
- ATSPutErrors errors = client.postEntities(generateATSEntity());
+ PutErrors errors = client.postEntities(generateEntity());
Assert.assertEquals(0, errors.getErrors().size());
} catch (YarnException e) {
Assert.fail("Exception is not expected");
@@ -69,13 +69,13 @@ public void testPostEntities() throws Exception {
public void testPostEntitiesWithError() throws Exception {
mockClientResponse(ClientResponse.Status.OK, true);
try {
- ATSPutErrors errors = client.postEntities(generateATSEntity());
+ PutErrors errors = client.postEntities(generateEntity());
Assert.assertEquals(1, errors.getErrors().size());
Assert.assertEquals("test entity id", errors.getErrors().get(0)
.getEntityId());
Assert.assertEquals("test entity type", errors.getErrors().get(0)
.getEntityType());
- Assert.assertEquals(ATSPutErrors.ATSPutError.IO_EXCEPTION,
+ Assert.assertEquals(PutErrors.PutError.IO_EXCEPTION,
errors.getErrors().get(0).getErrorCode());
} catch (YarnException e) {
Assert.fail("Exception is not expected");
@@ -86,7 +86,7 @@ public void testPostEntitiesWithError() throws Exception {
public void testPostEntitiesNoResponse() throws Exception {
mockClientResponse(ClientResponse.Status.INTERNAL_SERVER_ERROR, false);
try {
- client.postEntities(generateATSEntity());
+ client.postEntities(generateEntity());
Assert.fail("Exception is expected");
} catch (YarnException e) {
Assert.assertTrue(e.getMessage().contains(
@@ -98,27 +98,27 @@ private ClientResponse mockClientResponse(ClientResponse.Status status,
boolean hasError) {
ClientResponse response = mock(ClientResponse.class);
doReturn(response).when(client)
- .doPostingEntities(any(ATSEntities.class));
+ .doPostingEntities(any(Entities.class));
when(response.getClientResponseStatus()).thenReturn(status);
- ATSPutErrors.ATSPutError error = new ATSPutErrors.ATSPutError();
+ PutErrors.PutError error = new PutErrors.PutError();
error.setEntityId("test entity id");
error.setEntityType("test entity type");
- error.setErrorCode(ATSPutErrors.ATSPutError.IO_EXCEPTION);
- ATSPutErrors errors = new ATSPutErrors();
+ error.setErrorCode(PutErrors.PutError.IO_EXCEPTION);
+ PutErrors errors = new PutErrors();
if (hasError) {
errors.addError(error);
}
- when(response.getEntity(ATSPutErrors.class)).thenReturn(errors);
+ when(response.getEntity(PutErrors.class)).thenReturn(errors);
return response;
}
- private static ATSEntity generateATSEntity() {
- ATSEntity entity = new ATSEntity();
+ private static Entity generateEntity() {
+ Entity entity = new Entity();
entity.setEntityId("entity id");
entity.setEntityType("entity type");
entity.setStartTime(System.currentTimeMillis());
for (int i = 0; i < 2; ++i) {
- ATSEvent event = new ATSEvent();
+ Event event = new Event();
event.setTimestamp(System.currentTimeMillis());
event.setEventType("test event type " + i);
event.addEventInfo("key1", "val1");
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/TimelineUtils.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/TimelineUtils.java
deleted file mode 100644
index 4ab557e..0000000
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/TimelineUtils.java
+++ /dev/null
@@ -1,86 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.util;
-
-import java.io.IOException;
-
-import org.apache.hadoop.classification.InterfaceAudience.Public;
-import org.apache.hadoop.classification.InterfaceStability.Evolving;
-import org.codehaus.jackson.JsonGenerationException;
-import org.codehaus.jackson.map.AnnotationIntrospector;
-import org.codehaus.jackson.map.JsonMappingException;
-import org.codehaus.jackson.map.ObjectMapper;
-import org.codehaus.jackson.map.annotate.JsonSerialize.Inclusion;
-import org.codehaus.jackson.xc.JaxbAnnotationIntrospector;
-
-/**
- * The helper class for the timeline module.
- *
- */
-@Public
-@Evolving
-public class TimelineUtils {
-
- private static ObjectMapper mapper;
-
- static {
- mapper = new ObjectMapper();
- AnnotationIntrospector introspector = new JaxbAnnotationIntrospector();
- mapper.setAnnotationIntrospector(introspector);
- mapper.getSerializationConfig()
- .setSerializationInclusion(Inclusion.NON_NULL);
- }
-
- /**
- * Serialize a POJO object into a JSON string not in a pretty format
- *
- * @param o
- * an object to serialize
- * @return a JSON string
- * @throws IOException
- * @throws JsonMappingException
- * @throws JsonGenerationException
- */
- public static String dumpTimelineRecordtoJSON(Object o)
- throws JsonGenerationException, JsonMappingException, IOException {
- return dumpTimelineRecordtoJSON(o, false);
- }
-
- /**
- * Serialize a POJO object into a JSON string
- *
- * @param o
- * an object to serialize
- * @param pretty
- * whether in a pretty format or not
- * @return a JSON string
- * @throws IOException
- * @throws JsonMappingException
- * @throws JsonGenerationException
- */
- public static String dumpTimelineRecordtoJSON(Object o, boolean pretty)
- throws JsonGenerationException, JsonMappingException, IOException {
- if (pretty) {
- return mapper.defaultPrettyPrintingWriter().writeValueAsString(o);
- } else {
- return mapper.writeValueAsString(o);
- }
- }
-
-}
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/timeline/TimelineUtils.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/timeline/TimelineUtils.java
new file mode 100644
index 0000000..35d8560
--- /dev/null
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/timeline/TimelineUtils.java
@@ -0,0 +1,86 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.util.timeline;
+
+import java.io.IOException;
+
+import org.apache.hadoop.classification.InterfaceAudience.Public;
+import org.apache.hadoop.classification.InterfaceStability.Evolving;
+import org.codehaus.jackson.JsonGenerationException;
+import org.codehaus.jackson.map.AnnotationIntrospector;
+import org.codehaus.jackson.map.JsonMappingException;
+import org.codehaus.jackson.map.ObjectMapper;
+import org.codehaus.jackson.map.annotate.JsonSerialize.Inclusion;
+import org.codehaus.jackson.xc.JaxbAnnotationIntrospector;
+
+/**
+ * The helper class for the timeline module.
+ *
+ */
+@Public
+@Evolving
+public class TimelineUtils {
+
+ private static ObjectMapper mapper;
+
+ static {
+ mapper = new ObjectMapper();
+ AnnotationIntrospector introspector = new JaxbAnnotationIntrospector();
+ mapper.setAnnotationIntrospector(introspector);
+ mapper.getSerializationConfig()
+ .setSerializationInclusion(Inclusion.NON_NULL);
+ }
+
+ /**
+ * Serialize a POJO object into a JSON string without pretty formatting
+ *
+ * @param o
+ * an object to serialize
+ * @return a JSON string
+ * @throws IOException
+ * @throws JsonMappingException
+ * @throws JsonGenerationException
+ */
+ public static String dumpTimelineRecordtoJSON(Object o)
+ throws JsonGenerationException, JsonMappingException, IOException {
+ return dumpTimelineRecordtoJSON(o, false);
+ }
+
+ /**
+ * Serialize a POJO object into a JSON string
+ *
+ * @param o
+ * an object to serialize
+ * @param pretty
+ * whether in a pretty format or not
+ * @return a JSON string
+ * @throws IOException
+ * @throws JsonMappingException
+ * @throws JsonGenerationException
+ */
+ public static String dumpTimelineRecordtoJSON(Object o, boolean pretty)
+ throws JsonGenerationException, JsonMappingException, IOException {
+ if (pretty) {
+ return mapper.defaultPrettyPrintingWriter().writeValueAsString(o);
+ } else {
+ return mapper.writeValueAsString(o);
+ }
+ }
+
+}
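As a quick illustration of the relocated helper, here is a small sketch (assuming the Entity record from this patch and a made-up DumpTimelineRecordExample class) that pretty-prints a timeline record as JSON; null fields such as the unset start time are omitted because the mapper is configured with Inclusion.NON_NULL.

import org.apache.hadoop.yarn.api.records.timeline.Entity;
import org.apache.hadoop.yarn.util.timeline.TimelineUtils;

public class DumpTimelineRecordExample {
  public static void main(String[] args) throws Exception {
    Entity entity = new Entity();
    entity.setEntityType("MY_APPLICATION");
    entity.setEntityId("app_0001");
    // the second argument selects pretty printing
    System.out.println(TimelineUtils.dumpTimelineRecordtoJSON(entity, true));
  }
}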
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/timeline/package-info.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/timeline/package-info.java
new file mode 100644
index 0000000..5c18a55
--- /dev/null
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/timeline/package-info.java
@@ -0,0 +1,21 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+@InterfaceAudience.Public
+package org.apache.hadoop.yarn.util.timeline;
+import org.apache.hadoop.classification.InterfaceAudience;
+
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
index cc8b124..02faf80 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
@@ -1140,18 +1140,18 @@
org.apache.hadoop.yarn.server.applicationhistoryservice.FileSystemApplicationHistoryStore
-
+
-    <description>Store class name for application timeline store</description>
-    <name>yarn.ats.store.class</name>
-    <value>org.apache.hadoop.yarn.server.applicationhistoryservice.apptimeline.LeveldbApplicationTimelineStore</value>
+    <description>Store class name for timeline store</description>
+    <name>yarn.timeline.store.class</name>
+    <value>org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.LeveldbTimelineStore</value>
-    <description>Store file name for leveldb application timeline store</description>
-    <name>yarn.ats.leveldb-apptimeline-store.path</name>
-    <value>${yarn.log.dir}/ats</value>
+    <description>Store file name for leveldb timeline store</description>
+    <name>yarn.timeline.leveldb-timeline-store.path</name>
+    <value>${yarn.log.dir}/timeline</value>
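For reference, a minimal sketch of reading the renamed configuration keys; the string literals are used directly because the corresponding YarnConfiguration constants are not part of this hunk, and TimelineConfigExample is a made-up class name.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.conf.YarnConfiguration;

public class TimelineConfigExample {
  public static void main(String[] args) {
    Configuration conf = new YarnConfiguration();
    // falls back to the yarn-default.xml values shown above when unset in yarn-site.xml
    String storeClass = conf.get("yarn.timeline.store.class");
    String leveldbPath = conf.get("yarn.timeline.leveldb-timeline-store.path");
    System.out.println(storeClass + " -> " + leveldbPath);
  }
}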
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/records/apptimeline/TestApplicationTimelineRecords.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/records/apptimeline/TestApplicationTimelineRecords.java
deleted file mode 100644
index 330e099..0000000
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/records/apptimeline/TestApplicationTimelineRecords.java
+++ /dev/null
@@ -1,159 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.api.records.apptimeline;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import junit.framework.Assert;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.yarn.api.records.apptimeline.ATSPutErrors.ATSPutError;
-import org.apache.hadoop.yarn.util.TimelineUtils;
-import org.junit.Test;
-
-public class TestApplicationTimelineRecords {
-
- private static final Log LOG =
- LogFactory.getLog(TestApplicationTimelineRecords.class);
-
- @Test
- public void testATSEntities() throws Exception {
- ATSEntities entities = new ATSEntities();
- for (int j = 0; j < 2; ++j) {
- ATSEntity entity = new ATSEntity();
- entity.setEntityId("entity id " + j);
- entity.setEntityType("entity type " + j);
- entity.setStartTime(System.currentTimeMillis());
- for (int i = 0; i < 2; ++i) {
- ATSEvent event = new ATSEvent();
- event.setTimestamp(System.currentTimeMillis());
- event.setEventType("event type " + i);
- event.addEventInfo("key1", "val1");
- event.addEventInfo("key2", "val2");
- entity.addEvent(event);
- }
- entity.addRelatedEntity("test ref type 1", "test ref id 1");
- entity.addRelatedEntity("test ref type 2", "test ref id 2");
- entity.addPrimaryFilter("pkey1", "pval1");
- entity.addPrimaryFilter("pkey2", "pval2");
- entity.addOtherInfo("okey1", "oval1");
- entity.addOtherInfo("okey2", "oval2");
- entities.addEntity(entity);
- }
- LOG.info("Entities in JSON:");
- LOG.info(TimelineUtils.dumpTimelineRecordtoJSON(entities, true));
-
- Assert.assertEquals(2, entities.getEntities().size());
- ATSEntity entity1 = entities.getEntities().get(0);
- Assert.assertEquals("entity id 0", entity1.getEntityId());
- Assert.assertEquals("entity type 0", entity1.getEntityType());
- Assert.assertEquals(2, entity1.getRelatedEntities().size());
- Assert.assertEquals(2, entity1.getEvents().size());
- Assert.assertEquals(2, entity1.getPrimaryFilters().size());
- Assert.assertEquals(2, entity1.getOtherInfo().size());
- ATSEntity entity2 = entities.getEntities().get(1);
- Assert.assertEquals("entity id 1", entity2.getEntityId());
- Assert.assertEquals("entity type 1", entity2.getEntityType());
- Assert.assertEquals(2, entity2.getRelatedEntities().size());
- Assert.assertEquals(2, entity2.getEvents().size());
- Assert.assertEquals(2, entity2.getPrimaryFilters().size());
- Assert.assertEquals(2, entity2.getOtherInfo().size());
- }
-
- @Test
- public void testATSEvents() throws Exception {
- ATSEvents events = new ATSEvents();
- for (int j = 0; j < 2; ++j) {
- ATSEvents.ATSEventsOfOneEntity partEvents =
- new ATSEvents.ATSEventsOfOneEntity();
- partEvents.setEntityId("entity id " + j);
- partEvents.setEntityType("entity type " + j);
- for (int i = 0; i < 2; ++i) {
- ATSEvent event = new ATSEvent();
- event.setTimestamp(System.currentTimeMillis());
- event.setEventType("event type " + i);
- event.addEventInfo("key1", "val1");
- event.addEventInfo("key2", "val2");
- partEvents.addEvent(event);
- }
- events.addEvent(partEvents);
- }
- LOG.info("Events in JSON:");
- LOG.info(TimelineUtils.dumpTimelineRecordtoJSON(events, true));
-
- Assert.assertEquals(2, events.getAllEvents().size());
- ATSEvents.ATSEventsOfOneEntity partEvents1 = events.getAllEvents().get(0);
- Assert.assertEquals("entity id 0", partEvents1.getEntityId());
- Assert.assertEquals("entity type 0", partEvents1.getEntityType());
- Assert.assertEquals(2, partEvents1.getEvents().size());
- ATSEvent event11 = partEvents1.getEvents().get(0);
- Assert.assertEquals("event type 0", event11.getEventType());
- Assert.assertEquals(2, event11.getEventInfo().size());
- ATSEvent event12 = partEvents1.getEvents().get(1);
- Assert.assertEquals("event type 1", event12.getEventType());
- Assert.assertEquals(2, event12.getEventInfo().size());
- ATSEvents.ATSEventsOfOneEntity partEvents2 = events.getAllEvents().get(1);
- Assert.assertEquals("entity id 1", partEvents2.getEntityId());
- Assert.assertEquals("entity type 1", partEvents2.getEntityType());
- Assert.assertEquals(2, partEvents2.getEvents().size());
- ATSEvent event21 = partEvents2.getEvents().get(0);
- Assert.assertEquals("event type 0", event21.getEventType());
- Assert.assertEquals(2, event21.getEventInfo().size());
- ATSEvent event22 = partEvents2.getEvents().get(1);
- Assert.assertEquals("event type 1", event22.getEventType());
- Assert.assertEquals(2, event22.getEventInfo().size());
- }
-
- @Test
- public void testATSPutErrors() throws Exception {
- ATSPutErrors atsPutErrors = new ATSPutErrors();
- ATSPutError error1 = new ATSPutError();
- error1.setEntityId("entity id 1");
- error1.setEntityId("entity type 1");
- error1.setErrorCode(ATSPutError.NO_START_TIME);
- atsPutErrors.addError(error1);
- List<ATSPutError> errors = new ArrayList<ATSPutError>();
- errors.add(error1);
- ATSPutError error2 = new ATSPutError();
- error2.setEntityId("entity id 2");
- error2.setEntityId("entity type 2");
- error2.setErrorCode(ATSPutError.IO_EXCEPTION);
- errors.add(error2);
- atsPutErrors.addErrors(errors);
- LOG.info("Errors in JSON:");
- LOG.info(TimelineUtils.dumpTimelineRecordtoJSON(atsPutErrors, true));
-
- Assert.assertEquals(3, atsPutErrors.getErrors().size());
- ATSPutError e = atsPutErrors.getErrors().get(0);
- Assert.assertEquals(error1.getEntityId(), e.getEntityId());
- Assert.assertEquals(error1.getEntityType(), e.getEntityType());
- Assert.assertEquals(error1.getErrorCode(), e.getErrorCode());
- e = atsPutErrors.getErrors().get(1);
- Assert.assertEquals(error1.getEntityId(), e.getEntityId());
- Assert.assertEquals(error1.getEntityType(), e.getEntityType());
- Assert.assertEquals(error1.getErrorCode(), e.getErrorCode());
- e = atsPutErrors.getErrors().get(2);
- Assert.assertEquals(error2.getEntityId(), e.getEntityId());
- Assert.assertEquals(error2.getEntityType(), e.getEntityType());
- Assert.assertEquals(error2.getErrorCode(), e.getErrorCode());
- }
-
-}
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/records/timeline/TestTimelineRecords.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/records/timeline/TestTimelineRecords.java
new file mode 100644
index 0000000..573683b
--- /dev/null
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/records/timeline/TestTimelineRecords.java
@@ -0,0 +1,164 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.api.records.timeline;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import junit.framework.Assert;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.yarn.api.records.timeline.Entities;
+import org.apache.hadoop.yarn.api.records.timeline.Entity;
+import org.apache.hadoop.yarn.api.records.timeline.Event;
+import org.apache.hadoop.yarn.api.records.timeline.Events;
+import org.apache.hadoop.yarn.api.records.timeline.PutErrors;
+import org.apache.hadoop.yarn.api.records.timeline.PutErrors.PutError;
+import org.apache.hadoop.yarn.util.timeline.TimelineUtils;
+import org.junit.Test;
+
+public class TestTimelineRecords {
+
+ private static final Log LOG =
+ LogFactory.getLog(TestTimelineRecords.class);
+
+ @Test
+ public void testEntities() throws Exception {
+ Entities entities = new Entities();
+ for (int j = 0; j < 2; ++j) {
+ Entity entity = new Entity();
+ entity.setEntityId("entity id " + j);
+ entity.setEntityType("entity type " + j);
+ entity.setStartTime(System.currentTimeMillis());
+ for (int i = 0; i < 2; ++i) {
+ Event event = new Event();
+ event.setTimestamp(System.currentTimeMillis());
+ event.setEventType("event type " + i);
+ event.addEventInfo("key1", "val1");
+ event.addEventInfo("key2", "val2");
+ entity.addEvent(event);
+ }
+ entity.addRelatedEntity("test ref type 1", "test ref id 1");
+ entity.addRelatedEntity("test ref type 2", "test ref id 2");
+ entity.addPrimaryFilter("pkey1", "pval1");
+ entity.addPrimaryFilter("pkey2", "pval2");
+ entity.addOtherInfo("okey1", "oval1");
+ entity.addOtherInfo("okey2", "oval2");
+ entities.addEntity(entity);
+ }
+ LOG.info("Entities in JSON:");
+ LOG.info(TimelineUtils.dumpTimelineRecordtoJSON(entities, true));
+
+ Assert.assertEquals(2, entities.getEntities().size());
+ Entity entity1 = entities.getEntities().get(0);
+ Assert.assertEquals("entity id 0", entity1.getEntityId());
+ Assert.assertEquals("entity type 0", entity1.getEntityType());
+ Assert.assertEquals(2, entity1.getRelatedEntities().size());
+ Assert.assertEquals(2, entity1.getEvents().size());
+ Assert.assertEquals(2, entity1.getPrimaryFilters().size());
+ Assert.assertEquals(2, entity1.getOtherInfo().size());
+ Entity entity2 = entities.getEntities().get(1);
+ Assert.assertEquals("entity id 1", entity2.getEntityId());
+ Assert.assertEquals("entity type 1", entity2.getEntityType());
+ Assert.assertEquals(2, entity2.getRelatedEntities().size());
+ Assert.assertEquals(2, entity2.getEvents().size());
+ Assert.assertEquals(2, entity2.getPrimaryFilters().size());
+ Assert.assertEquals(2, entity2.getOtherInfo().size());
+ }
+
+ @Test
+ public void testEvents() throws Exception {
+ Events events = new Events();
+ for (int j = 0; j < 2; ++j) {
+ Events.EventsOfOneEntity partEvents =
+ new Events.EventsOfOneEntity();
+ partEvents.setEntityId("entity id " + j);
+ partEvents.setEntityType("entity type " + j);
+ for (int i = 0; i < 2; ++i) {
+ Event event = new Event();
+ event.setTimestamp(System.currentTimeMillis());
+ event.setEventType("event type " + i);
+ event.addEventInfo("key1", "val1");
+ event.addEventInfo("key2", "val2");
+ partEvents.addEvent(event);
+ }
+ events.addEvent(partEvents);
+ }
+ LOG.info("Events in JSON:");
+ LOG.info(TimelineUtils.dumpTimelineRecordtoJSON(events, true));
+
+ Assert.assertEquals(2, events.getAllEvents().size());
+ Events.EventsOfOneEntity partEvents1 = events.getAllEvents().get(0);
+ Assert.assertEquals("entity id 0", partEvents1.getEntityId());
+ Assert.assertEquals("entity type 0", partEvents1.getEntityType());
+ Assert.assertEquals(2, partEvents1.getEvents().size());
+ Event event11 = partEvents1.getEvents().get(0);
+ Assert.assertEquals("event type 0", event11.getEventType());
+ Assert.assertEquals(2, event11.getEventInfo().size());
+ Event event12 = partEvents1.getEvents().get(1);
+ Assert.assertEquals("event type 1", event12.getEventType());
+ Assert.assertEquals(2, event12.getEventInfo().size());
+ Events.EventsOfOneEntity partEvents2 = events.getAllEvents().get(1);
+ Assert.assertEquals("entity id 1", partEvents2.getEntityId());
+ Assert.assertEquals("entity type 1", partEvents2.getEntityType());
+ Assert.assertEquals(2, partEvents2.getEvents().size());
+ Event event21 = partEvents2.getEvents().get(0);
+ Assert.assertEquals("event type 0", event21.getEventType());
+ Assert.assertEquals(2, event21.getEventInfo().size());
+ Event event22 = partEvents2.getEvents().get(1);
+ Assert.assertEquals("event type 1", event22.getEventType());
+ Assert.assertEquals(2, event22.getEventInfo().size());
+ }
+
+ @Test
+ public void testPutErrors() throws Exception {
+ PutErrors putErrors = new PutErrors();
+ PutError error1 = new PutError();
+ error1.setEntityId("entity id 1");
+ error1.setEntityType("entity type 1");
+ error1.setErrorCode(PutError.NO_START_TIME);
+ putErrors.addError(error1);
+ List<PutError> errors = new ArrayList<PutError>();
+ errors.add(error1);
+ PutError error2 = new PutError();
+ error2.setEntityId("entity id 2");
+ error2.setEntityType("entity type 2");
+ error2.setErrorCode(PutError.IO_EXCEPTION);
+ errors.add(error2);
+ putErrors.addErrors(errors);
+ LOG.info("Errors in JSON:");
+ LOG.info(TimelineUtils.dumpTimelineRecordtoJSON(putErrors, true));
+
+ Assert.assertEquals(3, putErrors.getErrors().size());
+ PutError e = putErrors.getErrors().get(0);
+ Assert.assertEquals(error1.getEntityId(), e.getEntityId());
+ Assert.assertEquals(error1.getEntityType(), e.getEntityType());
+ Assert.assertEquals(error1.getErrorCode(), e.getErrorCode());
+ e = putErrors.getErrors().get(1);
+ Assert.assertEquals(error1.getEntityId(), e.getEntityId());
+ Assert.assertEquals(error1.getEntityType(), e.getEntityType());
+ Assert.assertEquals(error1.getErrorCode(), e.getErrorCode());
+ e = putErrors.getErrors().get(2);
+ Assert.assertEquals(error2.getEntityId(), e.getEntityId());
+ Assert.assertEquals(error2.getEntityType(), e.getEntityType());
+ Assert.assertEquals(error2.getErrorCode(), e.getErrorCode());
+ }
+
+}
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java
index 73a0941..f445260 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java
@@ -33,8 +33,8 @@
import org.apache.hadoop.yarn.YarnUncaughtExceptionHandler;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.apptimeline.ApplicationTimelineStore;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.apptimeline.LeveldbApplicationTimelineStore;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TimelineStore;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.LeveldbTimelineStore;
import org.apache.hadoop.yarn.server.applicationhistoryservice.webapp.AHSWebApp;
import org.apache.hadoop.yarn.webapp.WebApp;
import org.apache.hadoop.yarn.webapp.WebApps;
@@ -54,7 +54,7 @@
ApplicationHistoryClientService ahsClientService;
ApplicationHistoryManager historyManager;
- ApplicationTimelineStore timelineStore;
+ TimelineStore timelineStore;
private WebApp webApp;
public ApplicationHistoryServer() {
@@ -67,7 +67,7 @@ protected void serviceInit(Configuration conf) throws Exception {
ahsClientService = createApplicationHistoryClientService(historyManager);
addService(ahsClientService);
addService((Service) historyManager);
- timelineStore = createApplicationTimelineStore(conf);
+ timelineStore = createTimelineStore(conf);
addIfService(timelineStore);
super.serviceInit(conf);
}
@@ -141,11 +141,11 @@ protected ApplicationHistoryManager createApplicationHistoryManager(
return new ApplicationHistoryManagerImpl();
}
- protected ApplicationTimelineStore createApplicationTimelineStore(
+ protected TimelineStore createTimelineStore(
Configuration conf) {
return ReflectionUtils.newInstance(conf.getClass(
- YarnConfiguration.ATS_STORE, LeveldbApplicationTimelineStore.class,
- ApplicationTimelineStore.class), conf);
+ YarnConfiguration.TIMELINE_STORE, LeveldbTimelineStore.class,
+ TimelineStore.class), conf);
}
protected void startWebApp() {
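
With this change the server resolves the store implementation through YarnConfiguration.TIMELINE_STORE, so a different TimelineStore can be plugged in via configuration; a minimal sketch (setting the class explicitly to the default purely for illustration):

    // Sketch: createTimelineStore() reads YarnConfiguration.TIMELINE_STORE and
    // falls back to LeveldbTimelineStore; setClass() makes that choice explicit.
    Configuration conf = new YarnConfiguration();
    conf.setClass(YarnConfiguration.TIMELINE_STORE,
        LeveldbTimelineStore.class, TimelineStore.class);
    ApplicationHistoryServer server = new ApplicationHistoryServer();
    server.init(conf);   // serviceInit() instantiates and registers the store
    server.start();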
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/ApplicationTimelineReader.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/ApplicationTimelineReader.java
deleted file mode 100644
index e448ba8..0000000
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/ApplicationTimelineReader.java
+++ /dev/null
@@ -1,131 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.server.applicationhistoryservice.apptimeline;
-
-import java.io.IOException;
-import java.util.Collection;
-import java.util.EnumSet;
-import java.util.Set;
-import java.util.SortedSet;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.yarn.api.records.apptimeline.ATSEntities;
-import org.apache.hadoop.yarn.api.records.apptimeline.ATSEntity;
-import org.apache.hadoop.yarn.api.records.apptimeline.ATSEvents;
-
-/**
- * This interface is for retrieving application timeline information.
- */
-@InterfaceAudience.Private
-@InterfaceStability.Unstable
-public interface ApplicationTimelineReader {
-
- /**
- * Possible fields to retrieve for {@link #getEntities} and {@link
- * #getEntity}.
- */
- enum Field {
- EVENTS,
- RELATED_ENTITIES,
- PRIMARY_FILTERS,
- OTHER_INFO,
- LAST_EVENT_ONLY
- }
-
- /**
- * Default limit for {@link #getEntities} and {@link #getEntityTimelines}.
- */
- final long DEFAULT_LIMIT = 100;
-
- /**
- * This method retrieves a list of entity information, {@link ATSEntity},
- * sorted by the starting timestamp for the entity, descending.
- *
- * @param entityType The type of entities to return (required).
- * @param limit A limit on the number of entities to return. If null,
- * defaults to {@link #DEFAULT_LIMIT}.
- * @param windowStart The earliest start timestamp to retrieve (exclusive).
- * If null, defaults to retrieving all entities until the
- * limit is reached.
- * @param windowEnd The latest start timestamp to retrieve (inclusive).
- * If null, defaults to {@link Long#MAX_VALUE}
- * @param primaryFilter Retrieves only entities that have the specified
- * primary filter. If null, retrieves all entities.
- * This is an indexed retrieval, and no entities that
- * do not match the filter are scanned.
- * @param secondaryFilters Retrieves only entities that have exact matches
- * for all the specified filters in their primary
- * filters or other info. This is not an indexed
- * retrieval, so all entities are scanned but only
- * those matching the filters are returned.
- * @param fieldsToRetrieve Specifies which fields of the entity object to
- * retrieve (see {@link Field}). If the set of fields
- * contains {@link Field#LAST_EVENT_ONLY} and not
- * {@link Field#EVENTS}, the most recent event for
- * each entity is retrieved. If null, retrieves all
- * fields.
- * @return An {@link ATSEntities} object.
- * @throws IOException
- */
- ATSEntities getEntities(String entityType,
- Long limit, Long windowStart, Long windowEnd,
- NameValuePair primaryFilter, Collection<NameValuePair> secondaryFilters,
- EnumSet<Field> fieldsToRetrieve) throws IOException;
-
- /**
- * This method retrieves the entity information for a given entity.
- *
- * @param entity The entity whose information will be retrieved.
- * @param entityType The type of the entity.
- * @param fieldsToRetrieve Specifies which fields of the entity object to
- * retrieve (see {@link Field}). If the set of
- * fields contains {@link Field#LAST_EVENT_ONLY} and
- * not {@link Field#EVENTS}, the most recent event
- * for each entity is retrieved. If null, retrieves
- * all fields.
- * @return An {@link ATSEntity} object.
- * @throws IOException
- */
- ATSEntity getEntity(String entity, String entityType, EnumSet<Field>
- fieldsToRetrieve) throws IOException;
-
- /**
- * This method retrieves the events for a list of entities all of the same
- * entity type. The events for each entity are sorted in order of their
- * timestamps, descending.
- *
- * @param entityType The type of entities to retrieve events for.
- * @param entityIds The entity IDs to retrieve events for.
- * @param limit A limit on the number of events to return for each entity.
- * If null, defaults to {@link #DEFAULT_LIMIT} events per
- * entity.
- * @param windowStart If not null, retrieves only events later than the
- * given time (exclusive)
- * @param windowEnd If not null, retrieves only events earlier than the
- * given time (inclusive)
- * @param eventTypes Restricts the events returned to the given types. If
- * null, events of all types will be returned.
- * @return An {@link ATSEvents} object.
- * @throws IOException
- */
- ATSEvents getEntityTimelines(String entityType,
- SortedSet<String> entityIds, Long limit, Long windowStart,
- Long windowEnd, Set<String> eventTypes) throws IOException;
-}
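
A minimal usage sketch of the reader contract being removed here; the "store" variable, entity ids, and types are illustrative assumptions, not values from this patch:

    // Sketch against the interface above; "store" is assumed to be any
    // ApplicationTimelineStore implementation, e.g. the leveldb store.
    ApplicationTimelineReader reader = store;
    ATSEntity one = reader.getEntity("entity id 0", "entity type 0",
        EnumSet.of(Field.EVENTS, Field.PRIMARY_FILTERS));
    ATSEntities recent = reader.getEntities("entity type 0", 10L, null, null,
        null, null, EnumSet.of(Field.LAST_EVENT_ONLY));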
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/ApplicationTimelineStore.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/ApplicationTimelineStore.java
deleted file mode 100644
index b231418..0000000
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/ApplicationTimelineStore.java
+++ /dev/null
@@ -1,29 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.server.applicationhistoryservice.apptimeline;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.service.Service;
-
-@InterfaceAudience.Private
-@InterfaceStability.Unstable
-public interface ApplicationTimelineStore extends
- Service, ApplicationTimelineReader, ApplicationTimelineWriter {
-}
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/ApplicationTimelineWriter.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/ApplicationTimelineWriter.java
deleted file mode 100644
index 2a16833..0000000
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/ApplicationTimelineWriter.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.server.applicationhistoryservice.apptimeline;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.yarn.api.records.apptimeline.ATSEntities;
-import org.apache.hadoop.yarn.api.records.apptimeline.ATSPutErrors;
-
-import java.io.IOException;
-
-/**
- * This interface is for storing application timeline information.
- */
-@InterfaceAudience.Private
-@InterfaceStability.Unstable
-public interface ApplicationTimelineWriter {
-
- /**
- * Stores entity information to the application timeline store. Any errors
- * occurring for individual put request objects will be reported in the
- * response.
- *
- * @param data An {@link ATSEntities} object.
- * @return An {@link ATSPutErrors} object.
- * @throws IOException
- */
- ATSPutErrors put(ATSEntities data) throws IOException;
-
-}
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/EntityIdentifier.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/EntityIdentifier.java
deleted file mode 100644
index d22e616..0000000
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/EntityIdentifier.java
+++ /dev/null
@@ -1,100 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.server.applicationhistoryservice.apptimeline;
-
-import org.apache.hadoop.classification.InterfaceAudience.Private;
-import org.apache.hadoop.classification.InterfaceStability.Unstable;
-
-/**
- * The unique identifier for an entity
- */
-@Private
-@Unstable
-public class EntityIdentifier implements Comparable<EntityIdentifier> {
-
- private String id;
- private String type;
-
- public EntityIdentifier(String id, String type) {
- this.id = id;
- this.type = type;
- }
-
- /**
- * Get the entity Id.
- * @return The entity Id.
- */
- public String getId() {
- return id;
- }
-
- /**
- * Get the entity type.
- * @return The entity type.
- */
- public String getType() {
- return type;
- }
-
- @Override
- public int compareTo(EntityIdentifier other) {
- int c = type.compareTo(other.type);
- if (c != 0) return c;
- return id.compareTo(other.id);
- }
-
- @Override
- public int hashCode() {
- // generated by eclipse
- final int prime = 31;
- int result = 1;
- result = prime * result + ((id == null) ? 0 : id.hashCode());
- result = prime * result + ((type == null) ? 0 : type.hashCode());
- return result;
- }
-
- @Override
- public boolean equals(Object obj) {
- // generated by eclipse
- if (this == obj)
- return true;
- if (obj == null)
- return false;
- if (getClass() != obj.getClass())
- return false;
- EntityIdentifier other = (EntityIdentifier) obj;
- if (id == null) {
- if (other.id != null)
- return false;
- } else if (!id.equals(other.id))
- return false;
- if (type == null) {
- if (other.type != null)
- return false;
- } else if (!type.equals(other.type))
- return false;
- return true;
- }
-
- @Override
- public String toString() {
- return "{ id: " + id + ", type: "+ type + " }";
- }
-
-}
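
Because EntityIdentifier orders by type and then id, it can key sorted collections directly; a minimal sketch with assumed ids and types:

    // Sketch: compareTo() above sorts by type first, then id.
    SortedSet<EntityIdentifier> ids = new TreeSet<EntityIdentifier>();
    ids.add(new EntityIdentifier("app_2", "YARN_APPLICATION"));
    ids.add(new EntityIdentifier("app_1", "YARN_APPLICATION"));
    ids.add(new EntityIdentifier("container_1", "YARN_CONTAINER"));
    // Iteration order: app_1, app_2 (same type, ids compared), then container_1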
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/GenericObjectMapper.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/GenericObjectMapper.java
deleted file mode 100644
index 38ceb30..0000000
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/GenericObjectMapper.java
+++ /dev/null
@@ -1,208 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.yarn.server.applicationhistoryservice.apptimeline;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.io.WritableUtils;
-import org.codehaus.jackson.map.ObjectMapper;
-
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-
-/**
- * A utility class providing methods for serializing and deserializing
- * objects. The {@link #write(Object)}, {@link #read(byte[])} and {@link
- * #write(java.io.DataOutputStream, Object)}, {@link
- * #read(java.io.DataInputStream)} methods are used by the
- * {@link LeveldbApplicationTimelineStore} to store and retrieve arbitrary
- * JSON, while the {@link #writeReverseOrderedLong} and {@link
- * #readReverseOrderedLong} methods are used to sort entities in descending
- * start time order.
- */
-@InterfaceAudience.Private
-@InterfaceStability.Unstable
-public class GenericObjectMapper {
- private static final byte[] EMPTY_BYTES = new byte[0];
-
- private static final byte LONG = 0x1;
- private static final byte INTEGER = 0x2;
- private static final byte DOUBLE = 0x3;
- private static final byte STRING = 0x4;
- private static final byte BOOLEAN = 0x5;
- private static final byte LIST = 0x6;
- private static final byte MAP = 0x7;
-
- /**
- * Serializes an Object into a byte array. Along with {@link #read(byte[]) },
- * can be used to serialize an Object and deserialize it into an Object of
- * the same type without needing to specify the Object's type,
- * as long as it is one of the JSON-compatible objects Long, Integer,
- * Double, String, Boolean, List, or Map. The current implementation uses
- * ObjectMapper to serialize complex objects (List and Map) while using
- * Writable to serialize simpler objects, to produce fewer bytes.
- *
- * @param o An Object
- * @return A byte array representation of the Object
- * @throws IOException
- */
- public static byte[] write(Object o) throws IOException {
- if (o == null)
- return EMPTY_BYTES;
- ByteArrayOutputStream baos = new ByteArrayOutputStream();
- write(new DataOutputStream(baos), o);
- return baos.toByteArray();
- }
-
- /**
- * Serializes an Object and writes it to a DataOutputStream. Along with
- * {@link #read(java.io.DataInputStream)}, can be used to serialize an Object
- * and deserialize it into an Object of the same type without needing to
- * specify the Object's type, as long as it is one of the JSON-compatible
- * objects Long, Integer, Double, String, Boolean, List, or Map. The current
- * implementation uses ObjectMapper to serialize complex objects (List and
- * Map) while using Writable to serialize simpler objects, to produce fewer
- * bytes.
- *
- * @param dos A DataOutputStream
- * @param o An Object
- * @throws IOException
- */
- public static void write(DataOutputStream dos, Object o)
- throws IOException {
- if (o == null)
- return;
- if (o instanceof Long) {
- dos.write(LONG);
- WritableUtils.writeVLong(dos, (Long) o);
- } else if(o instanceof Integer) {
- dos.write(INTEGER);
- WritableUtils.writeVInt(dos, (Integer) o);
- } else if(o instanceof Double) {
- dos.write(DOUBLE);
- dos.writeDouble((Double) o);
- } else if (o instanceof String) {
- dos.write(STRING);
- WritableUtils.writeString(dos, (String) o);
- } else if (o instanceof Boolean) {
- dos.write(BOOLEAN);
- dos.writeBoolean((Boolean) o);
- } else if (o instanceof List) {
- dos.write(LIST);
- ObjectMapper mapper = new ObjectMapper();
- mapper.writeValue(dos, o);
- } else if (o instanceof Map) {
- dos.write(MAP);
- ObjectMapper mapper = new ObjectMapper();
- mapper.writeValue(dos, o);
- } else {
- throw new IOException("Couldn't serialize object");
- }
- }
-
- /**
- * Deserializes an Object from a byte array created with
- * {@link #write(Object)}.
- *
- * @param b A byte array
- * @return An Object
- * @throws IOException
- */
- public static Object read(byte[] b) throws IOException {
- if (b == null || b.length == 0)
- return null;
- ByteArrayInputStream bais = new ByteArrayInputStream(b);
- return read(new DataInputStream(bais));
- }
-
- /**
- * Reads an Object from a DataInputStream whose data has been written with
- * {@link #write(java.io.DataOutputStream, Object)}.
- *
- * @param dis A DataInputStream
- * @return An Object, null if an unrecognized type
- * @throws IOException
- */
- public static Object read(DataInputStream dis) throws IOException {
- byte code = (byte)dis.read();
- ObjectMapper mapper;
- switch (code) {
- case LONG:
- return WritableUtils.readVLong(dis);
- case INTEGER:
- return WritableUtils.readVInt(dis);
- case DOUBLE:
- return dis.readDouble();
- case STRING:
- return WritableUtils.readString(dis);
- case BOOLEAN:
- return dis.readBoolean();
- case LIST:
- mapper = new ObjectMapper();
- return mapper.readValue(dis, ArrayList.class);
- case MAP:
- mapper = new ObjectMapper();
- return mapper.readValue(dis, HashMap.class);
- default:
- return null;
- }
- }
-
- /**
- * Converts a long to a 8-byte array so that lexicographic ordering of the
- * produced byte arrays sort the longs in descending order.
- *
- * @param l A long
- * @return A byte array
- */
- public static byte[] writeReverseOrderedLong(long l) {
- byte[] b = new byte[8];
- b[0] = (byte)(0x7f ^ ((l >> 56) & 0xff));
- for (int i = 1; i < 7; i++)
- b[i] = (byte)(0xff ^ ((l >> 8*(7-i)) & 0xff));
- b[7] = (byte)(0xff ^ (l & 0xff));
- return b;
- }
-
- /**
- * Reads 8 bytes from an array starting at the specified offset and
- * converts them to a long. The bytes are assumed to have been created
- * with {@link #writeReverseOrderedLong}.
- *
- * @param b A byte array
- * @param offset An offset into the byte array
- * @return A long
- */
- public static long readReverseOrderedLong(byte[] b, int offset) {
- long l = b[offset] & 0xff;
- for (int i = 1; i < 8; i++) {
- l = l << 8;
- l = l | (b[offset+i]&0xff);
- }
- return l ^ 0x7fffffffffffffffl;
- }
-
-}
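
The reverse-ordered long encoding above is what lets the leveldb store return the newest entities first from a plain lexicographic key scan; a minimal sketch of that property (the timestamps are arbitrary example values):

    // Later timestamps encode to lexicographically smaller byte arrays,
    // and the encoding round-trips through readReverseOrderedLong().
    byte[] earlier = GenericObjectMapper.writeReverseOrderedLong(1000L);
    byte[] later = GenericObjectMapper.writeReverseOrderedLong(2000L);
    assert WritableComparator.compareBytes(later, 0, 8, earlier, 0, 8) < 0;
    assert GenericObjectMapper.readReverseOrderedLong(later, 0) == 2000L;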
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/LeveldbApplicationTimelineStore.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/LeveldbApplicationTimelineStore.java
deleted file mode 100644
index c2e93ca..0000000
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/LeveldbApplicationTimelineStore.java
+++ /dev/null
@@ -1,854 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.server.applicationhistoryservice.apptimeline;
-
-import java.io.ByteArrayOutputStream;
-import java.io.File;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.EnumSet;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Set;
-import java.util.SortedSet;
-import java.util.TreeMap;
-
-import com.google.common.annotations.VisibleForTesting;
-import org.apache.commons.collections.map.LRUMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.io.IOUtils;
-import org.apache.hadoop.io.WritableComparator;
-import org.apache.hadoop.service.AbstractService;
-import org.apache.hadoop.yarn.api.records.apptimeline.ATSEntities;
-import org.apache.hadoop.yarn.api.records.apptimeline.ATSEntity;
-import org.apache.hadoop.yarn.api.records.apptimeline.ATSEvent;
-import org.apache.hadoop.yarn.api.records.apptimeline.ATSEvents;
-import org.apache.hadoop.yarn.api.records.apptimeline.ATSEvents.ATSEventsOfOneEntity;
-import org.apache.hadoop.yarn.api.records.apptimeline.ATSPutErrors;
-import org.apache.hadoop.yarn.api.records.apptimeline.ATSPutErrors.ATSPutError;
-import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.fusesource.leveldbjni.JniDBFactory;
-import org.iq80.leveldb.DB;
-import org.iq80.leveldb.DBIterator;
-import org.iq80.leveldb.Options;
-import org.iq80.leveldb.WriteBatch;
-
-import static org.apache.hadoop.yarn.server.applicationhistoryservice
- .apptimeline.GenericObjectMapper.readReverseOrderedLong;
-import static org.apache.hadoop.yarn.server.applicationhistoryservice
- .apptimeline.GenericObjectMapper.writeReverseOrderedLong;
-
-/**
- * An implementation of an application timeline store backed by leveldb.
- */
-@InterfaceAudience.Private
-@InterfaceStability.Unstable
-public class LeveldbApplicationTimelineStore extends AbstractService
- implements ApplicationTimelineStore {
- private static final Log LOG = LogFactory
- .getLog(LeveldbApplicationTimelineStore.class);
-
- private static final String FILENAME = "leveldb-apptimeline-store.ldb";
-
- private static final byte[] START_TIME_LOOKUP_PREFIX = "k".getBytes();
- private static final byte[] ENTITY_ENTRY_PREFIX = "e".getBytes();
- private static final byte[] INDEXED_ENTRY_PREFIX = "i".getBytes();
-
- private static final byte[] PRIMARY_FILTER_COLUMN = "f".getBytes();
- private static final byte[] OTHER_INFO_COLUMN = "i".getBytes();
- private static final byte[] RELATED_COLUMN = "r".getBytes();
- private static final byte[] TIME_COLUMN = "t".getBytes();
-
- private static final byte[] EMPTY_BYTES = new byte[0];
-
- private static final int START_TIME_CACHE_SIZE = 10000;
-
- @SuppressWarnings("unchecked")
- private final Map<EntityIdentifier, Long> startTimeCache =
- Collections.synchronizedMap(new LRUMap(START_TIME_CACHE_SIZE));
-
- private DB db;
-
- public LeveldbApplicationTimelineStore() {
- super(LeveldbApplicationTimelineStore.class.getName());
- }
-
- @Override
- protected void serviceInit(Configuration conf) throws Exception {
- Options options = new Options();
- options.createIfMissing(true);
- JniDBFactory factory = new JniDBFactory();
- String path = conf.get(YarnConfiguration.ATS_LEVELDB_PATH_PROPERTY);
- File p = new File(path);
- if (!p.exists())
- if (!p.mkdirs())
- throw new IOException("Couldn't create directory for leveldb " +
- "application timeline store " + path);
- LOG.info("Using leveldb path " + path);
- db = factory.open(new File(path, FILENAME), options);
- super.serviceInit(conf);
- }
-
- @Override
- protected void serviceStop() throws Exception {
- IOUtils.cleanup(LOG, db);
- super.serviceStop();
- }
-
- private static class KeyBuilder {
- private static final int MAX_NUMBER_OF_KEY_ELEMENTS = 10;
- private byte[][] b;
- private boolean[] useSeparator;
- private int index;
- private int length;
-
- public KeyBuilder(int size) {
- b = new byte[size][];
- useSeparator = new boolean[size];
- index = 0;
- length = 0;
- }
-
- public static KeyBuilder newInstance() {
- return new KeyBuilder(MAX_NUMBER_OF_KEY_ELEMENTS);
- }
-
- public KeyBuilder add(String s) {
- return add(s.getBytes(), true);
- }
-
- public KeyBuilder add(byte[] t) {
- return add(t, false);
- }
-
- public KeyBuilder add(byte[] t, boolean sep) {
- b[index] = t;
- useSeparator[index] = sep;
- length += t.length;
- if (sep)
- length++;
- index++;
- return this;
- }
-
- public byte[] getBytes() throws IOException {
- ByteArrayOutputStream baos = new ByteArrayOutputStream(length);
- for (int i = 0; i < index; i++) {
- baos.write(b[i]);
- if (i < index-1 && useSeparator[i])
- baos.write(0x0);
- }
- return baos.toByteArray();
- }
-
- public byte[] getBytesForLookup() throws IOException {
- ByteArrayOutputStream baos = new ByteArrayOutputStream(length);
- for (int i = 0; i < index; i++) {
- baos.write(b[i]);
- if (useSeparator[i])
- baos.write(0x0);
- }
- return baos.toByteArray();
- }
- }
-
- private static class KeyParser {
- private final byte[] b;
- private int offset;
-
- public KeyParser(byte[] b, int offset) {
- this.b = b;
- this.offset = offset;
- }
-
- public String getNextString() throws IOException {
- if (offset >= b.length)
- throw new IOException(
- "tried to read nonexistent string from byte array");
- int i = 0;
- while (offset+i < b.length && b[offset+i] != 0x0)
- i++;
- String s = new String(b, offset, i);
- offset = offset + i + 1;
- return s;
- }
-
- public long getNextLong() throws IOException {
- if (offset+8 >= b.length)
- throw new IOException("byte array ran out when trying to read long");
- long l = readReverseOrderedLong(b, offset);
- offset += 8;
- return l;
- }
-
- public int getOffset() {
- return offset;
- }
- }
-
- @Override
- public ATSEntity getEntity(String entity, String entityType,
- EnumSet<Field> fields) throws IOException {
- DBIterator iterator = null;
- try {
- byte[] revStartTime = getStartTime(entity, entityType, null, null, null);
- if (revStartTime == null)
- return null;
- byte[] prefix = KeyBuilder.newInstance().add(ENTITY_ENTRY_PREFIX)
- .add(entityType).add(revStartTime).add(entity).getBytesForLookup();
-
- iterator = db.iterator();
- iterator.seek(prefix);
-
- return getEntity(entity, entityType,
- readReverseOrderedLong(revStartTime, 0), fields, iterator, prefix,
- prefix.length);
- } finally {
- IOUtils.cleanup(LOG, iterator);
- }
- }
-
- /**
- * Read entity from a db iterator. If no information is found in the
- * specified fields for this entity, return null.
- */
- private static ATSEntity getEntity(String entity, String entityType,
- Long startTime, EnumSet<Field> fields, DBIterator iterator,
- byte[] prefix, int prefixlen) throws IOException {
- if (fields == null)
- fields = EnumSet.allOf(Field.class);
-
- ATSEntity atsEntity = new ATSEntity();
- boolean events = false;
- boolean lastEvent = false;
- if (fields.contains(Field.EVENTS)) {
- events = true;
- atsEntity.setEvents(new ArrayList<ATSEvent>());
- } else if (fields.contains(Field.LAST_EVENT_ONLY)) {
- lastEvent = true;
- atsEntity.setEvents(new ArrayList<ATSEvent>());
- }
- else {
- atsEntity.setEvents(null);
- }
- boolean relatedEntities = false;
- if (fields.contains(Field.RELATED_ENTITIES)) {
- relatedEntities = true;
- atsEntity.setRelatedEntities(new HashMap<String, List<String>>());
- } else {
- atsEntity.setRelatedEntities(null);
- }
- boolean primaryFilters = false;
- if (fields.contains(Field.PRIMARY_FILTERS)) {
- primaryFilters = true;
- atsEntity.setPrimaryFilters(new HashMap<String, Object>());
- } else {
- atsEntity.setPrimaryFilters(null);
- }
- boolean otherInfo = false;
- if (fields.contains(Field.OTHER_INFO)) {
- otherInfo = true;
- atsEntity.setOtherInfo(new HashMap<String, Object>());
- } else {
- atsEntity.setOtherInfo(null);
- }
-
- // iterate through the entity's entry, parsing information if it is part
- // of a requested field
- for (; iterator.hasNext(); iterator.next()) {
- byte[] key = iterator.peekNext().getKey();
- if (!prefixMatches(prefix, prefixlen, key))
- break;
- if (key[prefixlen] == PRIMARY_FILTER_COLUMN[0]) {
- if (primaryFilters) {
- atsEntity.addPrimaryFilter(parseRemainingKey(key,
- prefixlen + PRIMARY_FILTER_COLUMN.length),
- GenericObjectMapper.read(iterator.peekNext().getValue()));
- }
- } else if (key[prefixlen] == OTHER_INFO_COLUMN[0]) {
- if (otherInfo) {
- atsEntity.addOtherInfo(parseRemainingKey(key,
- prefixlen + OTHER_INFO_COLUMN.length),
- GenericObjectMapper.read(iterator.peekNext().getValue()));
- }
- } else if (key[prefixlen] == RELATED_COLUMN[0]) {
- if (relatedEntities) {
- addRelatedEntity(atsEntity, key,
- prefixlen + RELATED_COLUMN.length);
- }
- } else if (key[prefixlen] == TIME_COLUMN[0]) {
- if (events || (lastEvent && atsEntity.getEvents().size() == 0)) {
- ATSEvent event = getEntityEvent(null, key, prefixlen +
- TIME_COLUMN.length, iterator.peekNext().getValue());
- if (event != null) {
- atsEntity.addEvent(event);
- }
- }
- } else {
- LOG.warn(String.format("Found unexpected column for entity %s of " +
- "type %s (0x%02x)", entity, entityType, key[prefixlen]));
- }
- }
-
- atsEntity.setEntityId(entity);
- atsEntity.setEntityType(entityType);
- atsEntity.setStartTime(startTime);
-
- return atsEntity;
- }
-
- @Override
- public ATSEvents getEntityTimelines(String entityType,
- SortedSet<String> entityIds, Long limit, Long windowStart,
- Long windowEnd, Set<String> eventType) throws IOException {
- ATSEvents atsEvents = new ATSEvents();
- if (entityIds == null || entityIds.isEmpty())
- return atsEvents;
- // create a lexicographically-ordered map from start time to entities
- Map<byte[], List<EntityIdentifier>> startTimeMap = new TreeMap<byte[], List<EntityIdentifier>>(new Comparator<byte[]>() {
- @Override
- public int compare(byte[] o1, byte[] o2) {
- return WritableComparator.compareBytes(o1, 0, o1.length, o2, 0,
- o2.length);
- }
- });
- DBIterator iterator = null;
- try {
- // look up start times for the specified entities
- // skip entities with no start time
- for (String entity : entityIds) {
- byte[] startTime = getStartTime(entity, entityType, null, null, null);
- if (startTime != null) {
- List<EntityIdentifier> entities = startTimeMap.get(startTime);
- if (entities == null) {
- entities = new ArrayList<EntityIdentifier>();
- startTimeMap.put(startTime, entities);
- }
- entities.add(new EntityIdentifier(entity, entityType));
- }
- }
- for (Entry<byte[], List<EntityIdentifier>> entry :
- startTimeMap.entrySet()) {
- // look up the events matching the given parameters (limit,
- // start time, end time, event types) for entities whose start times
- // were found and add the entities to the return list
- byte[] revStartTime = entry.getKey();
- for (EntityIdentifier entity : entry.getValue()) {
- ATSEventsOfOneEntity atsEntity = new ATSEventsOfOneEntity();
- atsEntity.setEntityId(entity.getId());
- atsEntity.setEntityType(entityType);
- atsEvents.addEvent(atsEntity);
- KeyBuilder kb = KeyBuilder.newInstance().add(ENTITY_ENTRY_PREFIX)
- .add(entityType).add(revStartTime).add(entity.getId())
- .add(TIME_COLUMN);
- byte[] prefix = kb.getBytesForLookup();
- if (windowEnd == null) {
- windowEnd = Long.MAX_VALUE;
- }
- byte[] revts = writeReverseOrderedLong(windowEnd);
- kb.add(revts);
- byte[] first = kb.getBytesForLookup();
- byte[] last = null;
- if (windowStart != null) {
- last = KeyBuilder.newInstance().add(prefix)
- .add(writeReverseOrderedLong(windowStart)).getBytesForLookup();
- }
- if (limit == null) {
- limit = DEFAULT_LIMIT;
- }
- iterator = db.iterator();
- for (iterator.seek(first); atsEntity.getEvents().size() < limit &&
- iterator.hasNext(); iterator.next()) {
- byte[] key = iterator.peekNext().getKey();
- if (!prefixMatches(prefix, prefix.length, key) || (last != null &&
- WritableComparator.compareBytes(key, 0, key.length, last, 0,
- last.length) > 0))
- break;
- ATSEvent event = getEntityEvent(eventType, key, prefix.length,
- iterator.peekNext().getValue());
- if (event != null)
- atsEntity.addEvent(event);
- }
- }
- }
- } finally {
- IOUtils.cleanup(LOG, iterator);
- }
- return atsEvents;
- }
-
- /**
- * Returns true if the byte array begins with the specified prefix.
- */
- private static boolean prefixMatches(byte[] prefix, int prefixlen,
- byte[] b) {
- if (b.length < prefixlen)
- return false;
- return WritableComparator.compareBytes(prefix, 0, prefixlen, b, 0,
- prefixlen) == 0;
- }
-
- @Override
- public ATSEntities getEntities(String entityType,
- Long limit, Long windowStart, Long windowEnd,
- NameValuePair primaryFilter, Collection<NameValuePair> secondaryFilters,
- EnumSet<Field> fields) throws IOException {
- if (primaryFilter == null) {
- // if no primary filter is specified, prefix the lookup with
- // ENTITY_ENTRY_PREFIX
- return getEntityByTime(ENTITY_ENTRY_PREFIX, entityType, limit,
- windowStart, windowEnd, secondaryFilters, fields);
- } else {
- // if a primary filter is specified, prefix the lookup with
- // INDEXED_ENTRY_PREFIX + primaryFilterName + primaryFilterValue +
- // ENTITY_ENTRY_PREFIX
- byte[] base = KeyBuilder.newInstance().add(INDEXED_ENTRY_PREFIX)
- .add(primaryFilter.getName())
- .add(GenericObjectMapper.write(primaryFilter.getValue()), true)
- .add(ENTITY_ENTRY_PREFIX).getBytesForLookup();
- return getEntityByTime(base, entityType, limit, windowStart, windowEnd,
- secondaryFilters, fields);
- }
- }
-
- /**
- * Retrieves a list of entities satisfying given parameters.
- *
- * @param base A byte array prefix for the lookup
- * @param entityType The type of the entity
- * @param limit A limit on the number of entities to return
- * @param starttime The earliest entity start time to retrieve (exclusive)
- * @param endtime The latest entity start time to retrieve (inclusive)
- * @param secondaryFilters Filter pairs that the entities should match
- * @param fields The set of fields to retrieve
- * @return A list of entities
- * @throws IOException
- */
- private ATSEntities getEntityByTime(byte[] base,
- String entityType, Long limit, Long starttime, Long endtime,
- Collection<NameValuePair> secondaryFilters, EnumSet<Field> fields)
- throws IOException {
- DBIterator iterator = null;
- try {
- KeyBuilder kb = KeyBuilder.newInstance().add(base).add(entityType);
- // only db keys matching the prefix (base + entity type) will be parsed
- byte[] prefix = kb.getBytesForLookup();
- if (endtime == null) {
- // if end time is null, place no restriction on end time
- endtime = Long.MAX_VALUE;
- }
- // using end time, construct a first key that will be seeked to
- byte[] revts = writeReverseOrderedLong(endtime);
- kb.add(revts);
- byte[] first = kb.getBytesForLookup();
- byte[] last = null;
- if (starttime != null) {
- // if start time is not null, set a last key that will not be
- // iterated past
- last = KeyBuilder.newInstance().add(base).add(entityType)
- .add(writeReverseOrderedLong(starttime)).getBytesForLookup();
- }
- if (limit == null) {
- // if limit is not specified, use the default
- limit = DEFAULT_LIMIT;
- }
-
- ATSEntities atsEntities = new ATSEntities();
- iterator = db.iterator();
- iterator.seek(first);
- // iterate until one of the following conditions is met: limit is
- // reached, there are no more keys, the key prefix no longer matches,
- // or a start time has been specified and reached/exceeded
- while (atsEntities.getEntities().size() < limit && iterator.hasNext()) {
- byte[] key = iterator.peekNext().getKey();
- if (!prefixMatches(prefix, prefix.length, key) || (last != null &&
- WritableComparator.compareBytes(key, 0, key.length, last, 0,
- last.length) > 0))
- break;
- // read the start time and entity from the current key
- KeyParser kp = new KeyParser(key, prefix.length);
- Long startTime = kp.getNextLong();
- String entity = kp.getNextString();
- // parse the entity that owns this key, iterating over all keys for
- // the entity
- ATSEntity atsEntity = getEntity(entity, entityType, startTime,
- fields, iterator, key, kp.getOffset());
- if (atsEntity == null)
- continue;
- // determine if the retrieved entity matches the provided secondary
- // filters, and if so add it to the list of entities to return
- boolean filterPassed = true;
- if (secondaryFilters != null) {
- for (NameValuePair filter : secondaryFilters) {
- Object v = atsEntity.getOtherInfo().get(filter.getName());
- if (v == null)
- v = atsEntity.getPrimaryFilters().get(filter.getName());
- if (v == null || !v.equals(filter.getValue())) {
- filterPassed = false;
- break;
- }
- }
- }
- if (filterPassed)
- atsEntities.addEntity(atsEntity);
- }
- return atsEntities;
- } finally {
- IOUtils.cleanup(LOG, iterator);
- }
- }
-
- /**
- * Put a single entity. If there is an error, add a PutError to the given
- * response.
- */
- private void put(ATSEntity atsEntity, ATSPutErrors response) {
- WriteBatch writeBatch = null;
- try {
- writeBatch = db.createWriteBatch();
- List<ATSEvent> events = atsEntity.getEvents();
- // look up the start time for the entity
- byte[] revStartTime = getStartTime(atsEntity.getEntityId(),
- atsEntity.getEntityType(), atsEntity.getStartTime(), events,
- writeBatch);
- if (revStartTime == null) {
- // if no start time is found, add an error and return
- ATSPutError error = new ATSPutError();
- error.setEntityId(atsEntity.getEntityId());
- error.setEntityType(atsEntity.getEntityType());
- error.setErrorCode(ATSPutError.NO_START_TIME);
- response.addError(error);
- return;
- }
- Long revStartTimeLong = readReverseOrderedLong(revStartTime, 0);
- Map<String, Object> primaryFilters = atsEntity.getPrimaryFilters();
-
- // write event entries
- if (events != null && !events.isEmpty()) {
- for (ATSEvent event : events) {
- byte[] revts = writeReverseOrderedLong(event.getTimestamp());
- byte[] key = createEntityEventKey(atsEntity.getEntityId(),
- atsEntity.getEntityType(), revStartTime, revts,
- event.getEventType());
- byte[] value = GenericObjectMapper.write(event.getEventInfo());
- writeBatch.put(key, value);
- writePrimaryFilterEntries(writeBatch, primaryFilters, key, value);
- }
- }
-
- // write related entity entries
- Map<String, List<String>> relatedEntities =
- atsEntity.getRelatedEntities();
- if (relatedEntities != null && !relatedEntities.isEmpty()) {
- for (Entry<String, List<String>> relatedEntityList :
- relatedEntities.entrySet()) {
- String relatedEntityType = relatedEntityList.getKey();
- for (String relatedEntityId : relatedEntityList.getValue()) {
- // look up start time of related entity
- byte[] relatedEntityStartTime = getStartTime(relatedEntityId,
- relatedEntityType, null, null, writeBatch);
- if (relatedEntityStartTime == null) {
- // if start time is not found, set start time of the related
- // entity to the start time of this entity, and write it to the
- // db and the cache
- relatedEntityStartTime = revStartTime;
- writeBatch.put(createStartTimeLookupKey(relatedEntityId,
- relatedEntityType), relatedEntityStartTime);
- startTimeCache.put(new EntityIdentifier(relatedEntityId,
- relatedEntityType), revStartTimeLong);
- }
- // write reverse entry (related entity -> entity)
- byte[] key = createReleatedEntityKey(relatedEntityId,
- relatedEntityType, relatedEntityStartTime,
- atsEntity.getEntityId(), atsEntity.getEntityType());
- writeBatch.put(key, EMPTY_BYTES);
- // TODO: write forward entry (entity -> related entity)?
- }
- }
- }
-
- // write primary filter entries
- if (primaryFilters != null && !primaryFilters.isEmpty()) {
- for (Entry<String, Object> primaryFilter : primaryFilters.entrySet()) {
- byte[] key = createPrimaryFilterKey(atsEntity.getEntityId(),
- atsEntity.getEntityType(), revStartTime, primaryFilter.getKey());
- byte[] value = GenericObjectMapper.write(primaryFilter.getValue());
- writeBatch.put(key, value);
- writePrimaryFilterEntries(writeBatch, primaryFilters, key, value);
- }
- }
-
- // write other info entries
- Map<String, Object> otherInfo = atsEntity.getOtherInfo();
- if (otherInfo != null && !otherInfo.isEmpty()) {
- for (Entry<String, Object> i : otherInfo.entrySet()) {
- byte[] key = createOtherInfoKey(atsEntity.getEntityId(),
- atsEntity.getEntityType(), revStartTime, i.getKey());
- byte[] value = GenericObjectMapper.write(i.getValue());
- writeBatch.put(key, value);
- writePrimaryFilterEntries(writeBatch, primaryFilters, key, value);
- }
- }
- db.write(writeBatch);
- } catch (IOException e) {
- LOG.error("Error putting entity " + atsEntity.getEntityId() +
- " of type " + atsEntity.getEntityType(), e);
- ATSPutError error = new ATSPutError();
- error.setEntityId(atsEntity.getEntityId());
- error.setEntityType(atsEntity.getEntityType());
- error.setErrorCode(ATSPutError.IO_EXCEPTION);
- response.addError(error);
- } finally {
- IOUtils.cleanup(LOG, writeBatch);
- }
- }
-
- /**
- * For a given key / value pair that has been written to the db,
- * write additional entries to the db for each primary filter.
- */
- private static void writePrimaryFilterEntries(WriteBatch writeBatch,
- Map<String, Object> primaryFilters, byte[] key, byte[] value)
- throws IOException {
- if (primaryFilters != null && !primaryFilters.isEmpty()) {
- for (Entry<String, Object> p : primaryFilters.entrySet()) {
- writeBatch.put(addPrimaryFilterToKey(p.getKey(), p.getValue(),
- key), value);
- }
- }
- }
-
- @Override
- public ATSPutErrors put(ATSEntities atsEntities) {
- ATSPutErrors response = new ATSPutErrors();
- for (ATSEntity atsEntity : atsEntities.getEntities()) {
- put(atsEntity, response);
- }
- return response;
- }
-
- /**
- * Get the unique start time for a given entity as a byte array that sorts
- * the timestamps in reverse order (see {@link
- * GenericObjectMapper#writeReverseOrderedLong(long)}).
- *
- * @param entityId The id of the entity
- * @param entityType The type of the entity
- * @param startTime The start time of the entity, or null
- * @param events A list of events for the entity, or null
- * @param writeBatch A leveldb write batch, if the method is called by a
- * put as opposed to a get
- * @return A byte array
- * @throws IOException
- */
- private byte[] getStartTime(String entityId, String entityType,
- Long startTime, List<ATSEvent> events, WriteBatch writeBatch)
- throws IOException {
- EntityIdentifier entity = new EntityIdentifier(entityId, entityType);
- if (startTime == null) {
- // start time is not provided, so try to look it up
- if (startTimeCache.containsKey(entity)) {
- // found the start time in the cache
- startTime = startTimeCache.get(entity);
- } else {
- // try to look up the start time in the db
- byte[] b = createStartTimeLookupKey(entity.getId(), entity.getType());
- byte[] v = db.get(b);
- if (v == null) {
- // did not find the start time in the db
- // if this is a put, try to set it from the provided events
- if (events == null || writeBatch == null) {
- // no events, or not a put, so return null
- return null;
- }
- Long min = Long.MAX_VALUE;
- for (ATSEvent e : events)
- if (min > e.getTimestamp())
- min = e.getTimestamp();
- startTime = min;
- // selected start time as minimum timestamp of provided events
- // write start time to db and cache
- writeBatch.put(b, writeReverseOrderedLong(startTime));
- startTimeCache.put(entity, startTime);
- } else {
- // found the start time in the db
- startTime = readReverseOrderedLong(v, 0);
- if (writeBatch != null) {
- // if this is a put, re-add the start time to the cache
- startTimeCache.put(entity, startTime);
- }
- }
- }
- } else {
- // start time is provided
- // TODO: verify start time in db as well as cache?
- if (startTimeCache.containsKey(entity)) {
- // if the start time is already in the cache,
- // and it is different from the provided start time,
- // use the one from the cache
- if (!startTime.equals(startTimeCache.get(entity)))
- startTime = startTimeCache.get(entity);
- } else if (writeBatch != null) {
- // if this is a put, write the provided start time to the db and the
- // cache
- byte[] b = createStartTimeLookupKey(entity.getId(), entity.getType());
- writeBatch.put(b, writeReverseOrderedLong(startTime));
- startTimeCache.put(entity, startTime);
- }
- }
- return writeReverseOrderedLong(startTime);
- }
-
- /**
- * Creates a key for looking up the start time of a given entity,
- * of the form START_TIME_LOOKUP_PREFIX + entitytype + entity.
- */
- private static byte[] createStartTimeLookupKey(String entity,
- String entitytype) throws IOException {
- return KeyBuilder.newInstance().add(START_TIME_LOOKUP_PREFIX)
- .add(entitytype).add(entity).getBytes();
- }
-
- /**
- * Creates an index entry for the given key of the form
- * INDEXED_ENTRY_PREFIX + primaryfiltername + primaryfiltervalue + key.
- */
- private static byte[] addPrimaryFilterToKey(String primaryFilterName,
- Object primaryFilterValue, byte[] key) throws IOException {
- return KeyBuilder.newInstance().add(INDEXED_ENTRY_PREFIX)
- .add(primaryFilterName)
- .add(GenericObjectMapper.write(primaryFilterValue), true).add(key)
- .getBytes();
- }
-
- /**
- * Creates an event key, serializing ENTITY_ENTRY_PREFIX + entitytype +
- * revstarttime + entity + TIME_COLUMN + reveventtimestamp + eventtype.
- */
- private static byte[] createEntityEventKey(String entity, String entitytype,
- byte[] revStartTime, byte[] reveventtimestamp, String eventtype)
- throws IOException {
- return KeyBuilder.newInstance().add(ENTITY_ENTRY_PREFIX)
- .add(entitytype).add(revStartTime).add(entity).add(TIME_COLUMN)
- .add(reveventtimestamp).add(eventtype).getBytes();
- }
-
- /**
- * Creates an event object from the given key, offset, and value. If the
- * event type is not contained in the specified set of event types,
- * returns null.
- */
- private static ATSEvent getEntityEvent(Set<String> eventTypes, byte[] key,
- int offset, byte[] value) throws IOException {
- KeyParser kp = new KeyParser(key, offset);
- long ts = kp.getNextLong();
- String tstype = kp.getNextString();
- if (eventTypes == null || eventTypes.contains(tstype)) {
- ATSEvent event = new ATSEvent();
- event.setTimestamp(ts);
- event.setEventType(tstype);
- Object o = GenericObjectMapper.read(value);
- if (o == null) {
- event.setEventInfo(null);
- } else if (o instanceof Map) {
- @SuppressWarnings("unchecked")
- Map<String, Object> m = (Map<String, Object>) o;
- event.setEventInfo(m);
- } else {
- throw new IOException("Couldn't deserialize event info map");
- }
- return event;
- }
- return null;
- }
-
- /**
- * Creates a primary filter key, serializing ENTITY_ENTRY_PREFIX +
- * entitytype + revstarttime + entity + PRIMARY_FILTER_COLUMN + name.
- */
- private static byte[] createPrimaryFilterKey(String entity,
- String entitytype, byte[] revStartTime, String name) throws IOException {
- return KeyBuilder.newInstance().add(ENTITY_ENTRY_PREFIX).add(entitytype)
- .add(revStartTime).add(entity).add(PRIMARY_FILTER_COLUMN).add(name)
- .getBytes();
- }
-
- /**
- * Creates an other info key, serializing ENTITY_ENTRY_PREFIX + entitytype +
- * revstarttime + entity + OTHER_INFO_COLUMN + name.
- */
- private static byte[] createOtherInfoKey(String entity, String entitytype,
- byte[] revStartTime, String name) throws IOException {
- return KeyBuilder.newInstance().add(ENTITY_ENTRY_PREFIX).add(entitytype)
- .add(revStartTime).add(entity).add(OTHER_INFO_COLUMN).add(name)
- .getBytes();
- }
-
- /**
- * Creates a string representation of the byte array from the given offset
- * to the end of the array (for parsing other info keys).
- */
- private static String parseRemainingKey(byte[] b, int offset) {
- return new String(b, offset, b.length - offset);
- }
-
- /**
- * Creates a related entity key, serializing ENTITY_ENTRY_PREFIX +
- * entitytype + revstarttime + entity + RELATED_COLUMN + relatedentitytype +
- * relatedentity.
- */
- private static byte[] createReleatedEntityKey(String entity,
- String entitytype, byte[] revStartTime, String relatedEntity,
- String relatedEntityType) throws IOException {
- return KeyBuilder.newInstance().add(ENTITY_ENTRY_PREFIX).add(entitytype)
- .add(revStartTime).add(entity).add(RELATED_COLUMN)
- .add(relatedEntityType).add(relatedEntity).getBytes();
- }
-
- /**
- * Parses the related entity from the given key at the given offset and
- * adds it to the given entity.
- */
- private static void addRelatedEntity(ATSEntity atsEntity, byte[] key,
- int offset) throws IOException {
- KeyParser kp = new KeyParser(key, offset);
- String type = kp.getNextString();
- String id = kp.getNextString();
- atsEntity.addRelatedEntity(type, id);
- }
-
- /**
- * Clears the cache to test reloading start times from leveldb (only for
- * testing).
- */
- @VisibleForTesting
- void clearStartTimeCache() {
- startTimeCache.clear();
- }
-}
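For illustration only (not part of the patch): in the store removed above, secondary filters use AND semantics, and each filter is checked against otherInfo first, then primaryFilters. A minimal standalone sketch of that check, with the entity's maps passed in directly; the class and method names are hypothetical.

import java.util.Collection;
import java.util.Map;

final class SecondaryFilterCheck {
  // Returns true only if every filter matches; a name absent from both maps rejects the entity.
  static boolean passes(Map<String, Object> otherInfo,
      Map<String, Object> primaryFilters, Collection<NameValuePair> filters) {
    if (filters == null) {
      return true; // no secondary filters: everything passes
    }
    for (NameValuePair filter : filters) {
      Object v = otherInfo.get(filter.getName());
      if (v == null) {
        v = primaryFilters.get(filter.getName());
      }
      if (v == null || !v.equals(filter.getValue())) {
        return false; // AND semantics: a single miss is enough to reject
      }
    }
    return true;
  }
}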
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/MemoryApplicationTimelineStore.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/MemoryApplicationTimelineStore.java
deleted file mode 100644
index 1c8e392..0000000
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/MemoryApplicationTimelineStore.java
+++ /dev/null
@@ -1,296 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.server.applicationhistoryservice.apptimeline;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.EnumSet;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.PriorityQueue;
-import java.util.Set;
-import java.util.SortedSet;
-
-import org.apache.hadoop.classification.InterfaceAudience.Private;
-import org.apache.hadoop.classification.InterfaceStability.Unstable;
-import org.apache.hadoop.service.AbstractService;
-import org.apache.hadoop.yarn.api.records.apptimeline.ATSEntities;
-import org.apache.hadoop.yarn.api.records.apptimeline.ATSEntity;
-import org.apache.hadoop.yarn.api.records.apptimeline.ATSEvent;
-import org.apache.hadoop.yarn.api.records.apptimeline.ATSEvents;
-import org.apache.hadoop.yarn.api.records.apptimeline.ATSEvents.ATSEventsOfOneEntity;
-import org.apache.hadoop.yarn.api.records.apptimeline.ATSPutErrors;
-import org.apache.hadoop.yarn.api.records.apptimeline.ATSPutErrors.ATSPutError;
-
-/**
- * In-memory implementation of {@link ApplicationTimelineStore}. This
- * implementation is for test purposes only. If it is instantiated
- * improperly, history data may end up being read from and written to
- * different in-memory stores.
- *
- */
-@Private
-@Unstable
-public class MemoryApplicationTimelineStore
- extends AbstractService implements ApplicationTimelineStore {
-
- private Map<EntityIdentifier, ATSEntity> entities =
- new HashMap<EntityIdentifier, ATSEntity>();
-
- public MemoryApplicationTimelineStore() {
- super(MemoryApplicationTimelineStore.class.getName());
- }
-
- @Override
- public ATSEntities getEntities(String entityType, Long limit,
- Long windowStart, Long windowEnd, NameValuePair primaryFilter,
- Collection<NameValuePair> secondaryFilters, EnumSet<Field> fields) {
- if (limit == null) {
- limit = DEFAULT_LIMIT;
- }
- if (windowStart == null) {
- windowStart = Long.MIN_VALUE;
- }
- if (windowEnd == null) {
- windowEnd = Long.MAX_VALUE;
- }
- if (fields == null) {
- fields = EnumSet.allOf(Field.class);
- }
- List<ATSEntity> entitiesSelected = new ArrayList<ATSEntity>();
- for (ATSEntity entity : new PriorityQueue<ATSEntity>(entities.values())) {
- if (entitiesSelected.size() >= limit) {
- break;
- }
- if (!entity.getEntityType().equals(entityType)) {
- continue;
- }
- if (entity.getStartTime() <= windowStart) {
- continue;
- }
- if (entity.getStartTime() > windowEnd) {
- continue;
- }
- if (primaryFilter != null &&
- !matchFilter(entity.getPrimaryFilters(), primaryFilter)) {
- continue;
- }
- if (secondaryFilters != null) { // OR logic
- boolean flag = false;
- for (NameValuePair secondaryFilter : secondaryFilters) {
- if (secondaryFilter != null &&
- matchFilter(entity.getOtherInfo(), secondaryFilter)) {
- flag = true;
- break;
- }
- }
- if (!flag) {
- continue;
- }
- }
- entitiesSelected.add(entity);
- }
- List<ATSEntity> entitiesToReturn = new ArrayList<ATSEntity>();
- for (ATSEntity entitySelected : entitiesSelected) {
- entitiesToReturn.add(maskFields(entitySelected, fields));
- }
- Collections.sort(entitiesToReturn);
- ATSEntities entitiesWrapper = new ATSEntities();
- entitiesWrapper.setEntities(entitiesToReturn);
- return entitiesWrapper;
- }
-
- @Override
- public ATSEntity getEntity(String entityId, String entityType,
- EnumSet<Field> fieldsToRetrieve) {
- if (fieldsToRetrieve == null) {
- fieldsToRetrieve = EnumSet.allOf(Field.class);
- }
- ATSEntity entity = entities.get(new EntityIdentifier(entityId, entityType));
- if (entity == null) {
- return null;
- } else {
- return maskFields(entity, fieldsToRetrieve);
- }
- }
-
- @Override
- public ATSEvents getEntityTimelines(String entityType,
- SortedSet<String> entityIds, Long limit, Long windowStart,
- Long windowEnd,
- Set<String> eventTypes) {
- ATSEvents allEvents = new ATSEvents();
- if (entityIds == null) {
- return allEvents;
- }
- if (limit == null) {
- limit = DEFAULT_LIMIT;
- }
- if (windowStart == null) {
- windowStart = Long.MIN_VALUE;
- }
- if (windowEnd == null) {
- windowEnd = Long.MAX_VALUE;
- }
- for (String entityId : entityIds) {
- EntityIdentifier entityID = new EntityIdentifier(entityId, entityType);
- ATSEntity entity = entities.get(entityID);
- if (entity == null) {
- continue;
- }
- ATSEventsOfOneEntity events = new ATSEventsOfOneEntity();
- events.setEntityId(entityId);
- events.setEntityType(entityType);
- for (ATSEvent event : entity.getEvents()) {
- if (events.getEvents().size() >= limit) {
- break;
- }
- if (event.getTimestamp() <= windowStart) {
- continue;
- }
- if (event.getTimestamp() > windowEnd) {
- continue;
- }
- if (eventTypes != null && !eventTypes.contains(event.getEventType())) {
- continue;
- }
- events.addEvent(event);
- }
- allEvents.addEvent(events);
- }
- return allEvents;
- }
-
- @Override
- public ATSPutErrors put(ATSEntities data) {
- ATSPutErrors errors = new ATSPutErrors();
- for (ATSEntity entity : data.getEntities()) {
- EntityIdentifier entityId =
- new EntityIdentifier(entity.getEntityId(), entity.getEntityType());
- // store entity info in memory
- ATSEntity existingEntity = entities.get(entityId);
- if (existingEntity == null) {
- existingEntity = new ATSEntity();
- existingEntity.setEntityId(entity.getEntityId());
- existingEntity.setEntityType(entity.getEntityType());
- existingEntity.setStartTime(entity.getStartTime());
- entities.put(entityId, existingEntity);
- }
- if (entity.getEvents() != null) {
- if (existingEntity.getEvents() == null) {
- existingEntity.setEvents(entity.getEvents());
- } else {
- existingEntity.addEvents(entity.getEvents());
- }
- Collections.sort(existingEntity.getEvents());
- }
- // check startTime
- if (existingEntity.getStartTime() == null) {
- if (existingEntity.getEvents() == null
- || existingEntity.getEvents().isEmpty()) {
- ATSPutError error = new ATSPutError();
- error.setEntityId(entityId.getId());
- error.setEntityType(entityId.getType());
- error.setErrorCode(ATSPutError.NO_START_TIME);
- errors.addError(error);
- entities.remove(entityId);
- continue;
- } else {
- existingEntity.setStartTime(entity.getEvents().get(0).getTimestamp());
- }
- }
- if (entity.getPrimaryFilters() != null) {
- if (existingEntity.getPrimaryFilters() == null) {
- existingEntity.setPrimaryFilters(entity.getPrimaryFilters());
- } else {
- existingEntity.addPrimaryFilters(entity.getPrimaryFilters());
- }
- }
- if (entity.getOtherInfo() != null) {
- if (existingEntity.getOtherInfo() == null) {
- existingEntity.setOtherInfo(entity.getOtherInfo());
- } else {
- existingEntity.addOtherInfo(entity.getOtherInfo());
- }
- }
- // relate it to other entities
- if (entity.getRelatedEntities() == null) {
- continue;
- }
- for (Map.Entry<String, List<String>> partRelatedEntities : entity
- .getRelatedEntities().entrySet()) {
- if (partRelatedEntities == null) {
- continue;
- }
- for (String idStr : partRelatedEntities.getValue()) {
- EntityIdentifier relatedEntityId =
- new EntityIdentifier(idStr, partRelatedEntities.getKey());
- ATSEntity relatedEntity = entities.get(relatedEntityId);
- if (relatedEntity != null) {
- relatedEntity.addRelatedEntity(
- existingEntity.getEntityType(), existingEntity.getEntityId());
- } else {
- relatedEntity = new ATSEntity();
- relatedEntity.setEntityId(relatedEntityId.getId());
- relatedEntity.setEntityType(relatedEntityId.getType());
- relatedEntity.setStartTime(existingEntity.getStartTime());
- relatedEntity.addRelatedEntity(existingEntity.getEntityType(),
- existingEntity.getEntityId());
- entities.put(relatedEntityId, relatedEntity);
- }
- }
- }
- }
- return errors;
- }
-
- private static ATSEntity maskFields(
- ATSEntity entity, EnumSet<Field> fields) {
- // Conceal the fields that are not going to be exposed
- ATSEntity entityToReturn = new ATSEntity();
- entityToReturn.setEntityId(entity.getEntityId());
- entityToReturn.setEntityType(entity.getEntityType());
- entityToReturn.setStartTime(entity.getStartTime());
- entityToReturn.setEvents(fields.contains(Field.EVENTS) ?
- entity.getEvents() : fields.contains(Field.LAST_EVENT_ONLY) ?
- Arrays.asList(entity.getEvents().get(0)) : null);
- entityToReturn.setRelatedEntities(fields.contains(Field.RELATED_ENTITIES) ?
- entity.getRelatedEntities() : null);
- entityToReturn.setPrimaryFilters(fields.contains(Field.PRIMARY_FILTERS) ?
- entity.getPrimaryFilters() : null);
- entityToReturn.setOtherInfo(fields.contains(Field.OTHER_INFO) ?
- entity.getOtherInfo() : null);
- return entityToReturn;
- }
-
- private static boolean matchFilter(Map<String, Object> tags,
- NameValuePair filter) {
- Object value = tags.get(filter.getName());
- if (value == null) { // doesn't have the filter
- return false;
- } else if (!value.equals(filter.getValue())) { // doesn't match the filter
- return false;
- }
- return true;
- }
-
-}
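For illustration only (not part of the patch): a minimal usage sketch of the in-memory store, assuming it runs inside a test that drives the service lifecycle manually; all ids, types, and timestamps below are made up. Note that, unlike the leveldb store, secondary filters here use OR logic and are matched against otherInfo only.

MemoryApplicationTimelineStore store = new MemoryApplicationTimelineStore();
store.init(new YarnConfiguration());
store.start();

ATSEntity entity = new ATSEntity();
entity.setEntityId("entity_1");         // hypothetical id
entity.setEntityType("TEST_TYPE");      // hypothetical type
entity.setStartTime(123L);

ATSEntities batch = new ATSEntities();
batch.addEntity(entity);
ATSPutErrors errors = store.put(batch); // empty unless no start time could be determined

// null limit/window/filters/fields fall back to the defaults handled above
ATSEntities result = store.getEntities("TEST_TYPE", null, null, null, null, null, null);

store.stop();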
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/NameValuePair.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/NameValuePair.java
deleted file mode 100644
index 66a21bb..0000000
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/NameValuePair.java
+++ /dev/null
@@ -1,59 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.yarn.server.applicationhistoryservice.apptimeline;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-
-/**
- * A class holding a name and value pair, used for specifying filters in
- * {@link ApplicationTimelineReader}.
- */
-@InterfaceAudience.Private
-@InterfaceStability.Unstable
-public class NameValuePair {
- String name;
- Object value;
-
- public NameValuePair(String name, Object value) {
- this.name = name;
- this.value = value;
- }
-
- /**
- * Get the name.
- * @return The name.
- */
- public String getName() {
-
- return name;
- }
-
- /**
- * Get the value.
- * @return The value.
- */
- public Object getValue() {
- return value;
- }
-
- @Override
- public String toString() {
- return "{ name: " + name + ", value: " + value + " }";
- }
-}
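For illustration only (not part of the patch): how filter pairs might be constructed and handed to a reader; the names and values are hypothetical, and the snippet assumes the usual java.util imports.

NameValuePair primaryFilter = new NameValuePair("user", "alice");
Collection<NameValuePair> secondaryFilters =
    Collections.singletonList(new NameValuePair("appState", "RUNNING"));
// e.g. store.getEntities("TEST_TYPE", null, null, null, primaryFilter, secondaryFilters, null);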
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/package-info.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/package-info.java
deleted file mode 100644
index c3aaafe..0000000
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/package-info.java
+++ /dev/null
@@ -1,20 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-@InterfaceAudience.Private
-package org.apache.hadoop.yarn.server.applicationhistoryservice.apptimeline;
-import org.apache.hadoop.classification.InterfaceAudience;
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/EntityIdentifier.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/EntityIdentifier.java
new file mode 100644
index 0000000..4b202d8
--- /dev/null
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/EntityIdentifier.java
@@ -0,0 +1,100 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline;
+
+import org.apache.hadoop.classification.InterfaceAudience.Private;
+import org.apache.hadoop.classification.InterfaceStability.Unstable;
+
+/**
+ * The unique identifier for an entity.
+ */
+@Private
+@Unstable
+public class EntityIdentifier implements Comparable<EntityIdentifier> {
+
+ private String id;
+ private String type;
+
+ public EntityIdentifier(String id, String type) {
+ this.id = id;
+ this.type = type;
+ }
+
+ /**
+ * Get the entity Id.
+ * @return The entity Id.
+ */
+ public String getId() {
+ return id;
+ }
+
+ /**
+ * Get the entity type.
+ * @return The entity type.
+ */
+ public String getType() {
+ return type;
+ }
+
+ @Override
+ public int compareTo(EntityIdentifier other) {
+ int c = type.compareTo(other.type);
+ if (c != 0) return c;
+ return id.compareTo(other.id);
+ }
+
+ @Override
+ public int hashCode() {
+ // generated by eclipse
+ final int prime = 31;
+ int result = 1;
+ result = prime * result + ((id == null) ? 0 : id.hashCode());
+ result = prime * result + ((type == null) ? 0 : type.hashCode());
+ return result;
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ // generated by eclipse
+ if (this == obj)
+ return true;
+ if (obj == null)
+ return false;
+ if (getClass() != obj.getClass())
+ return false;
+ EntityIdentifier other = (EntityIdentifier) obj;
+ if (id == null) {
+ if (other.id != null)
+ return false;
+ } else if (!id.equals(other.id))
+ return false;
+ if (type == null) {
+ if (other.type != null)
+ return false;
+ } else if (!type.equals(other.type))
+ return false;
+ return true;
+ }
+
+ @Override
+ public String toString() {
+ return "{ id: " + id + ", type: "+ type + " }";
+ }
+
+}
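For illustration only (not part of the patch): EntityIdentifier is used as a map key (for example, the start-time cache in LeveldbTimelineStore), which relies on the equals/hashCode pair above, while compareTo orders first by type and then by id. The values below are hypothetical and the snippet assumes the usual java.util imports.

EntityIdentifier a = new EntityIdentifier("entity_1", "TEST_TYPE");
EntityIdentifier b = new EntityIdentifier("entity_2", "TEST_TYPE");

Map<EntityIdentifier, Long> startTimes = new HashMap<EntityIdentifier, Long>();
startTimes.put(a, 100L);
// startTimes.get(new EntityIdentifier("entity_1", "TEST_TYPE")) returns 100L (value equality)
// a.compareTo(b) < 0: the types are equal, so the ids decide the ordering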
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/GenericObjectMapper.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/GenericObjectMapper.java
new file mode 100644
index 0000000..7d1c54b
--- /dev/null
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/GenericObjectMapper.java
@@ -0,0 +1,222 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.io.WritableUtils;
+import org.codehaus.jackson.map.ObjectMapper;
+
+/**
+ * A utility class providing methods for serializing and deserializing
+ * objects. The {@link #write(Object)}, {@link #read(byte[])} and {@link
+ * #write(java.io.DataOutputStream, Object)}, {@link
+ * #read(java.io.DataInputStream)} methods are used by the
+ * {@link LeveldbTimelineStore} to store and retrieve arbitrary
+ * JSON, while the {@link #writeReverseOrderedLong} and {@link
+ * #readReverseOrderedLong} methods are used to sort entities in descending
+ * start time order.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Unstable
+public class GenericObjectMapper {
+ private static final byte[] EMPTY_BYTES = new byte[0];
+
+ private static final byte LONG = 0x1;
+ private static final byte INTEGER = 0x2;
+ private static final byte DOUBLE = 0x3;
+ private static final byte STRING = 0x4;
+ private static final byte BOOLEAN = 0x5;
+ private static final byte LIST = 0x6;
+ private static final byte MAP = 0x7;
+
+ /**
+ * Serializes an Object into a byte array. Along with {@link #read(byte[]) },
+ * can be used to serialize an Object and deserialize it into an Object of
+ * the same type without needing to specify the Object's type,
+ * as long as it is one of the JSON-compatible objects Long, Integer,
+ * Double, String, Boolean, List, or Map. The current implementation uses
+ * ObjectMapper to serialize complex objects (List and Map) while using
+ * Writable to serialize simpler objects, to produce fewer bytes.
+ *
+ * @param o An Object
+ * @return A byte array representation of the Object
+ * @throws IOException
+ */
+ public static byte[] write(Object o) throws IOException {
+ if (o == null)
+ return EMPTY_BYTES;
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ write(new DataOutputStream(baos), o);
+ return baos.toByteArray();
+ }
+
+ /**
+ * Serializes an Object and writes it to a DataOutputStream. Along with
+ * {@link #read(java.io.DataInputStream)}, can be used to serialize an Object
+ * and deserialize it into an Object of the same type without needing to
+ * specify the Object's type, as long as it is one of the JSON-compatible
+ * objects Long, Integer, Double, String, Boolean, List, or Map. The current
+ * implementation uses ObjectMapper to serialize complex objects (List and
+ * Map) while using Writable to serialize simpler objects, to produce fewer
+ * bytes.
+ *
+ * @param dos A DataOutputStream
+ * @param o An Object
+ * @throws IOException
+ */
+ public static void write(DataOutputStream dos, Object o)
+ throws IOException {
+ if (o == null)
+ return;
+ if (o instanceof Long) {
+ dos.write(LONG);
+ WritableUtils.writeVLong(dos, (Long) o);
+ } else if(o instanceof Integer) {
+ dos.write(INTEGER);
+ WritableUtils.writeVInt(dos, (Integer) o);
+ } else if(o instanceof Double) {
+ dos.write(DOUBLE);
+ dos.writeDouble((Double) o);
+ } else if (o instanceof String) {
+ dos.write(STRING);
+ WritableUtils.writeString(dos, (String) o);
+ } else if (o instanceof Boolean) {
+ dos.write(BOOLEAN);
+ dos.writeBoolean((Boolean) o);
+ } else if (o instanceof List) {
+ dos.write(LIST);
+ ObjectMapper mapper = new ObjectMapper();
+ mapper.writeValue(dos, o);
+ } else if (o instanceof Map) {
+ dos.write(MAP);
+ ObjectMapper mapper = new ObjectMapper();
+ mapper.writeValue(dos, o);
+ } else {
+ throw new IOException("Couldn't serialize object");
+ }
+ }
+
+ /**
+ * Deserializes an Object from a byte array created with
+ * {@link #write(Object)}.
+ *
+ * @param b A byte array
+ * @return An Object
+ * @throws IOException
+ */
+ public static Object read(byte[] b) throws IOException {
+ return read(b, 0);
+ }
+
+ /**
+ * Deserializes an Object from a byte array at a specified offset, assuming
+ * the bytes were created with {@link #write(Object)}.
+ *
+ * @param b A byte array
+ * @param offset Offset into the array
+ * @return An Object
+ * @throws IOException
+ */
+ public static Object read(byte[] b, int offset) throws IOException {
+ if (b == null || b.length == 0) {
+ return null;
+ }
+ ByteArrayInputStream bais = new ByteArrayInputStream(b, offset,
+ b.length - offset);
+ return read(new DataInputStream(bais));
+ }
+
+ /**
+ * Reads an Object from a DataInputStream whose data has been written with
+ * {@link #write(java.io.DataOutputStream, Object)}.
+ *
+ * @param dis A DataInputStream
+ * @return An Object, null if an unrecognized type
+ * @throws IOException
+ */
+ public static Object read(DataInputStream dis) throws IOException {
+ byte code = (byte)dis.read();
+ ObjectMapper mapper;
+ switch (code) {
+ case LONG:
+ return WritableUtils.readVLong(dis);
+ case INTEGER:
+ return WritableUtils.readVInt(dis);
+ case DOUBLE:
+ return dis.readDouble();
+ case STRING:
+ return WritableUtils.readString(dis);
+ case BOOLEAN:
+ return dis.readBoolean();
+ case LIST:
+ mapper = new ObjectMapper();
+ return mapper.readValue(dis, ArrayList.class);
+ case MAP:
+ mapper = new ObjectMapper();
+ return mapper.readValue(dis, HashMap.class);
+ default:
+ return null;
+ }
+ }
+
+ /**
+ * Converts a long to an 8-byte array so that lexicographic ordering of the
+ * produced byte arrays sorts the longs in descending order.
+ *
+ * @param l A long
+ * @return A byte array
+ */
+ public static byte[] writeReverseOrderedLong(long l) {
+ byte[] b = new byte[8];
+ b[0] = (byte)(0x7f ^ ((l >> 56) & 0xff));
+ for (int i = 1; i < 7; i++)
+ b[i] = (byte)(0xff ^ ((l >> 8*(7-i)) & 0xff));
+ b[7] = (byte)(0xff ^ (l & 0xff));
+ return b;
+ }
+
+ /**
+ * Reads 8 bytes from an array starting at the specified offset and
+ * converts them to a long. The bytes are assumed to have been created
+ * with {@link #writeReverseOrderedLong}.
+ *
+ * @param b A byte array
+ * @param offset An offset into the byte array
+ * @return A long
+ */
+ public static long readReverseOrderedLong(byte[] b, int offset) {
+ long l = b[offset] & 0xff;
+ for (int i = 1; i < 8; i++) {
+ l = l << 8;
+ l = l | (b[offset+i]&0xff);
+ }
+ return l ^ 0x7fffffffffffffffl;
+ }
+
+}
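For illustration only (not part of the patch): a small sketch of the two properties the javadoc above relies on, namely that write()/read() round-trip JSON-compatible values and that writeReverseOrderedLong() makes larger timestamps sort lexicographically first. The demo method is hypothetical; it assumes the same package as the class above plus org.apache.hadoop.io.WritableComparator.

static void demo() throws IOException {
  // Round trip of a JSON-compatible value.
  byte[] serialized = GenericObjectMapper.write("hello");
  assert "hello".equals(GenericObjectMapper.read(serialized));

  // Reverse-ordered longs: the later timestamp (200) produces bytes that sort
  // *before* those of the earlier one (100), so a leveldb scan sees newest first.
  byte[] rev100 = GenericObjectMapper.writeReverseOrderedLong(100L);
  byte[] rev200 = GenericObjectMapper.writeReverseOrderedLong(200L);
  assert WritableComparator.compareBytes(rev200, 0, rev200.length,
      rev100, 0, rev100.length) < 0;

  // And the encoding is invertible.
  assert GenericObjectMapper.readReverseOrderedLong(rev100, 0) == 100L;
}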
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/LeveldbTimelineStore.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/LeveldbTimelineStore.java
new file mode 100644
index 0000000..76b53e0
--- /dev/null
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/LeveldbTimelineStore.java
@@ -0,0 +1,873 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline;
+
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+import java.util.SortedSet;
+import java.util.TreeMap;
+
+import com.google.common.annotations.VisibleForTesting;
+import org.apache.commons.collections.map.LRUMap;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.io.WritableComparator;
+import org.apache.hadoop.service.AbstractService;
+import org.apache.hadoop.yarn.api.records.timeline.Entities;
+import org.apache.hadoop.yarn.api.records.timeline.Entity;
+import org.apache.hadoop.yarn.api.records.timeline.Event;
+import org.apache.hadoop.yarn.api.records.timeline.Events;
+import org.apache.hadoop.yarn.api.records.timeline.PutErrors;
+import org.apache.hadoop.yarn.api.records.timeline.Events.EventsOfOneEntity;
+import org.apache.hadoop.yarn.api.records.timeline.PutErrors.PutError;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import org.fusesource.leveldbjni.JniDBFactory;
+import org.iq80.leveldb.DB;
+import org.iq80.leveldb.DBIterator;
+import org.iq80.leveldb.Options;
+import org.iq80.leveldb.WriteBatch;
+
+import static org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.GenericObjectMapper.readReverseOrderedLong;
+import static org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.GenericObjectMapper.writeReverseOrderedLong;
+
+/**
+ * An implementation of a timeline store backed by leveldb.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Unstable
+public class LeveldbTimelineStore extends AbstractService
+ implements TimelineStore {
+ private static final Log LOG = LogFactory
+ .getLog(LeveldbTimelineStore.class);
+
+ private static final String FILENAME = "leveldb-timeline-store.ldb";
+
+ private static final byte[] START_TIME_LOOKUP_PREFIX = "k".getBytes();
+ private static final byte[] ENTITY_ENTRY_PREFIX = "e".getBytes();
+ private static final byte[] INDEXED_ENTRY_PREFIX = "i".getBytes();
+
+ private static final byte[] PRIMARY_FILTER_COLUMN = "f".getBytes();
+ private static final byte[] OTHER_INFO_COLUMN = "i".getBytes();
+ private static final byte[] RELATED_COLUMN = "r".getBytes();
+ private static final byte[] TIME_COLUMN = "t".getBytes();
+
+ private static final byte[] EMPTY_BYTES = new byte[0];
+
+ private static final int START_TIME_CACHE_SIZE = 10000;
+
+ @SuppressWarnings("unchecked")
+ private final Map<EntityIdentifier, Long> startTimeCache =
+ Collections.synchronizedMap(new LRUMap(START_TIME_CACHE_SIZE));
+
+ private DB db;
+
+ public LeveldbTimelineStore() {
+ super(LeveldbTimelineStore.class.getName());
+ }
+
+ @Override
+ protected void serviceInit(Configuration conf) throws Exception {
+ Options options = new Options();
+ options.createIfMissing(true);
+ JniDBFactory factory = new JniDBFactory();
+ String path = conf.get(YarnConfiguration.TIMELINE_LEVELDB_PATH_PROPERTY);
+ File p = new File(path);
+ if (!p.exists())
+ if (!p.mkdirs())
+ throw new IOException("Couldn't create directory for leveldb " +
+ "timeline store " + path);
+ LOG.info("Using leveldb path " + path);
+ db = factory.open(new File(path, FILENAME), options);
+ super.serviceInit(conf);
+ }
+
+ @Override
+ protected void serviceStop() throws Exception {
+ IOUtils.cleanup(LOG, db);
+ super.serviceStop();
+ }
+
+ private static class KeyBuilder {
+ private static final int MAX_NUMBER_OF_KEY_ELEMENTS = 10;
+ private byte[][] b;
+ private boolean[] useSeparator;
+ private int index;
+ private int length;
+
+ public KeyBuilder(int size) {
+ b = new byte[size][];
+ useSeparator = new boolean[size];
+ index = 0;
+ length = 0;
+ }
+
+ public static KeyBuilder newInstance() {
+ return new KeyBuilder(MAX_NUMBER_OF_KEY_ELEMENTS);
+ }
+
+ public KeyBuilder add(String s) {
+ return add(s.getBytes(), true);
+ }
+
+ public KeyBuilder add(byte[] t) {
+ return add(t, false);
+ }
+
+ public KeyBuilder add(byte[] t, boolean sep) {
+ b[index] = t;
+ useSeparator[index] = sep;
+ length += t.length;
+ if (sep)
+ length++;
+ index++;
+ return this;
+ }
+
+ public byte[] getBytes() throws IOException {
+ ByteArrayOutputStream baos = new ByteArrayOutputStream(length);
+ for (int i = 0; i < index; i++) {
+ baos.write(b[i]);
+ if (i < index-1 && useSeparator[i])
+ baos.write(0x0);
+ }
+ return baos.toByteArray();
+ }
+
+ public byte[] getBytesForLookup() throws IOException {
+ ByteArrayOutputStream baos = new ByteArrayOutputStream(length);
+ for (int i = 0; i < index; i++) {
+ baos.write(b[i]);
+ if (useSeparator[i])
+ baos.write(0x0);
+ }
+ return baos.toByteArray();
+ }
+ }
+
+ private static class KeyParser {
+ private final byte[] b;
+ private int offset;
+
+ public KeyParser(byte[] b, int offset) {
+ this.b = b;
+ this.offset = offset;
+ }
+
+ public String getNextString() throws IOException {
+ if (offset >= b.length)
+ throw new IOException(
+ "tried to read nonexistent string from byte array");
+ int i = 0;
+ while (offset+i < b.length && b[offset+i] != 0x0)
+ i++;
+ String s = new String(b, offset, i);
+ offset = offset + i + 1;
+ return s;
+ }
+
+ public long getNextLong() throws IOException {
+ if (offset+8 >= b.length)
+ throw new IOException("byte array ran out when trying to read long");
+ long l = readReverseOrderedLong(b, offset);
+ offset += 8;
+ return l;
+ }
+
+ public int getOffset() {
+ return offset;
+ }
+ }
+
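For illustration only (not part of the patch): KeyBuilder and KeyParser are private to this class, so the sketch below is assumed to run from inside it (for example, a test hook); the type, id, and timestamp are hypothetical. It builds an entity key the same way getEntity() does and then walks it back.

byte[] revStartTime = GenericObjectMapper.writeReverseOrderedLong(1234L);
byte[] key = KeyBuilder.newInstance()
    .add(ENTITY_ENTRY_PREFIX)   // "e"; byte[] overload, so no separator follows
    .add("TEST_TYPE")           // String overload: a 0x0 separator follows
    .add(revStartTime)          // 8 bytes, no separator
    .add("entity_1")            // last element: getBytes() omits its trailing separator
    .getBytes();

KeyParser kp = new KeyParser(key, ENTITY_ENTRY_PREFIX.length);
String type = kp.getNextString();   // "TEST_TYPE"
long startTime = kp.getNextLong();  // 1234L, decoded back from the reverse-ordered bytes
String id = kp.getNextString();     // "entity_1"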
+ @Override
+ public Entity getEntity(String entityId, String entityType,
+ EnumSet<Field> fields) throws IOException {
+ DBIterator iterator = null;
+ try {
+ byte[] revStartTime = getStartTime(entityId, entityType, null, null, null);
+ if (revStartTime == null)
+ return null;
+ byte[] prefix = KeyBuilder.newInstance().add(ENTITY_ENTRY_PREFIX)
+ .add(entityType).add(revStartTime).add(entityId).getBytesForLookup();
+
+ iterator = db.iterator();
+ iterator.seek(prefix);
+
+ return getEntity(entityId, entityType,
+ readReverseOrderedLong(revStartTime, 0), fields, iterator, prefix,
+ prefix.length);
+ } finally {
+ IOUtils.cleanup(LOG, iterator);
+ }
+ }
+
+ /**
+ * Read entity from a db iterator. If no information is found in the
+ * specified fields for this entity, return null.
+ */
+ private static Entity getEntity(String entityId, String entityType,
+ Long startTime, EnumSet<Field> fields, DBIterator iterator,
+ byte[] prefix, int prefixlen) throws IOException {
+ if (fields == null)
+ fields = EnumSet.allOf(Field.class);
+
+ Entity entity = new Entity();
+ boolean events = false;
+ boolean lastEvent = false;
+ if (fields.contains(Field.EVENTS)) {
+ events = true;
+ entity.setEvents(new ArrayList<Event>());
+ } else if (fields.contains(Field.LAST_EVENT_ONLY)) {
+ lastEvent = true;
+ entity.setEvents(new ArrayList<Event>());
+ }
+ else {
+ entity.setEvents(null);
+ }
+ boolean relatedEntities = false;
+ if (fields.contains(Field.RELATED_ENTITIES)) {
+ relatedEntities = true;
+ } else {
+ entity.setRelatedEntities(null);
+ }
+ boolean primaryFilters = false;
+ if (fields.contains(Field.PRIMARY_FILTERS)) {
+ primaryFilters = true;
+ } else {
+ entity.setPrimaryFilters(null);
+ }
+ boolean otherInfo = false;
+ if (fields.contains(Field.OTHER_INFO)) {
+ otherInfo = true;
+ entity.setOtherInfo(new HashMap<String, Object>());
+ } else {
+ entity.setOtherInfo(null);
+ }
+
+ // iterate through the entity's entry, parsing information if it is part
+ // of a requested field
+ for (; iterator.hasNext(); iterator.next()) {
+ byte[] key = iterator.peekNext().getKey();
+ if (!prefixMatches(prefix, prefixlen, key))
+ break;
+ if (key[prefixlen] == PRIMARY_FILTER_COLUMN[0]) {
+ if (primaryFilters) {
+ addPrimaryFilter(entity, key,
+ prefixlen + PRIMARY_FILTER_COLUMN.length);
+ }
+ } else if (key[prefixlen] == OTHER_INFO_COLUMN[0]) {
+ if (otherInfo) {
+ entity.addOtherInfo(parseRemainingKey(key,
+ prefixlen + OTHER_INFO_COLUMN.length),
+ GenericObjectMapper.read(iterator.peekNext().getValue()));
+ }
+ } else if (key[prefixlen] == RELATED_COLUMN[0]) {
+ if (relatedEntities) {
+ addRelatedEntity(entity, key,
+ prefixlen + RELATED_COLUMN.length);
+ }
+ } else if (key[prefixlen] == TIME_COLUMN[0]) {
+ if (events || (lastEvent && entity.getEvents().size() == 0)) {
+ Event event = getEntityEvent(null, key, prefixlen +
+ TIME_COLUMN.length, iterator.peekNext().getValue());
+ if (event != null) {
+ entity.addEvent(event);
+ }
+ }
+ } else {
+ LOG.warn(String.format("Found unexpected column for entity %s of " +
+ "type %s (0x%02x)", entityId, entityType, key[prefixlen]));
+ }
+ }
+
+ entity.setEntityId(entityId);
+ entity.setEntityType(entityType);
+ entity.setStartTime(startTime);
+
+ return entity;
+ }
+
+ @Override
+ public Events getEntityTimelines(String entityType,
+ SortedSet<String> entityIds, Long limit, Long windowStart,
+ Long windowEnd, Set<String> eventType) throws IOException {
+ Events events = new Events();
+ if (entityIds == null || entityIds.isEmpty())
+ return events;
+ // create a lexicographically-ordered map from start time to entities
+ Map<byte[], List<EntityIdentifier>> startTimeMap = new TreeMap<byte[], List<EntityIdentifier>>(new Comparator<byte[]>() {
+ @Override
+ public int compare(byte[] o1, byte[] o2) {
+ return WritableComparator.compareBytes(o1, 0, o1.length, o2, 0,
+ o2.length);
+ }
+ });
+ DBIterator iterator = null;
+ try {
+ // look up start times for the specified entities
+ // skip entities with no start time
+ for (String entity : entityIds) {
+ byte[] startTime = getStartTime(entity, entityType, null, null, null);
+ if (startTime != null) {
+ List<EntityIdentifier> entities = startTimeMap.get(startTime);
+ if (entities == null) {
+ entities = new ArrayList<EntityIdentifier>();
+ startTimeMap.put(startTime, entities);
+ }
+ entities.add(new EntityIdentifier(entity, entityType));
+ }
+ }
+ for (Entry<byte[], List<EntityIdentifier>> entry :
+ startTimeMap.entrySet()) {
+ // look up the events matching the given parameters (limit,
+ // start time, end time, event types) for entities whose start times
+ // were found and add the entities to the return list
+ byte[] revStartTime = entry.getKey();
+ for (EntityIdentifier entityID : entry.getValue()) {
+ EventsOfOneEntity entity = new EventsOfOneEntity();
+ entity.setEntityId(entityID.getId());
+ entity.setEntityType(entityType);
+ events.addEvent(entity);
+ KeyBuilder kb = KeyBuilder.newInstance().add(ENTITY_ENTRY_PREFIX)
+ .add(entityType).add(revStartTime).add(entityID.getId())
+ .add(TIME_COLUMN);
+ byte[] prefix = kb.getBytesForLookup();
+ if (windowEnd == null) {
+ windowEnd = Long.MAX_VALUE;
+ }
+ byte[] revts = writeReverseOrderedLong(windowEnd);
+ kb.add(revts);
+ byte[] first = kb.getBytesForLookup();
+ byte[] last = null;
+ if (windowStart != null) {
+ last = KeyBuilder.newInstance().add(prefix)
+ .add(writeReverseOrderedLong(windowStart)).getBytesForLookup();
+ }
+ if (limit == null) {
+ limit = DEFAULT_LIMIT;
+ }
+ iterator = db.iterator();
+ for (iterator.seek(first); entity.getEvents().size() < limit &&
+ iterator.hasNext(); iterator.next()) {
+ byte[] key = iterator.peekNext().getKey();
+ if (!prefixMatches(prefix, prefix.length, key) || (last != null &&
+ WritableComparator.compareBytes(key, 0, key.length, last, 0,
+ last.length) > 0))
+ break;
+ Event event = getEntityEvent(eventType, key, prefix.length,
+ iterator.peekNext().getValue());
+ if (event != null)
+ entity.addEvent(event);
+ }
+ }
+ }
+ } finally {
+ IOUtils.cleanup(LOG, iterator);
+ }
+ return events;
+ }
+
+ /**
+ * Returns true if the byte array begins with the specified prefix.
+ */
+ private static boolean prefixMatches(byte[] prefix, int prefixlen,
+ byte[] b) {
+ if (b.length < prefixlen)
+ return false;
+ return WritableComparator.compareBytes(prefix, 0, prefixlen, b, 0,
+ prefixlen) == 0;
+ }
+
+ @Override
+ public Entities getEntities(String entityType,
+ Long limit, Long windowStart, Long windowEnd,
+ NameValuePair primaryFilter, Collection<NameValuePair> secondaryFilters,
+ EnumSet<Field> fields) throws IOException {
+ if (primaryFilter == null) {
+ // if no primary filter is specified, prefix the lookup with
+ // ENTITY_ENTRY_PREFIX
+ return getEntityByTime(ENTITY_ENTRY_PREFIX, entityType, limit,
+ windowStart, windowEnd, secondaryFilters, fields);
+ } else {
+ // if a primary filter is specified, prefix the lookup with
+ // INDEXED_ENTRY_PREFIX + primaryFilterName + primaryFilterValue +
+ // ENTITY_ENTRY_PREFIX
+ byte[] base = KeyBuilder.newInstance().add(INDEXED_ENTRY_PREFIX)
+ .add(primaryFilter.getName())
+ .add(GenericObjectMapper.write(primaryFilter.getValue()), true)
+ .add(ENTITY_ENTRY_PREFIX).getBytesForLookup();
+ return getEntityByTime(base, entityType, limit, windowStart, windowEnd,
+ secondaryFilters, fields);
+ }
+ }
+
+ /**
+ * Retrieves a list of entities satisfying given parameters.
+ *
+ * @param base A byte array prefix for the lookup
+ * @param entityType The type of the entity
+ * @param limit A limit on the number of entities to return
+ * @param starttime The earliest entity start time to retrieve (exclusive)
+ * @param endtime The latest entity start time to retrieve (inclusive)
+ * @param secondaryFilters Filter pairs that the entities should match
+ * @param fields The set of fields to retrieve
+ * @return A list of entities
+ * @throws IOException
+ */
+ private Entities getEntityByTime(byte[] base,
+ String entityType, Long limit, Long starttime, Long endtime,
+ Collection<NameValuePair> secondaryFilters, EnumSet<Field> fields)
+ throws IOException {
+ DBIterator iterator = null;
+ try {
+ KeyBuilder kb = KeyBuilder.newInstance().add(base).add(entityType);
+ // only db keys matching the prefix (base + entity type) will be parsed
+ byte[] prefix = kb.getBytesForLookup();
+ if (endtime == null) {
+ // if end time is null, place no restriction on end time
+ endtime = Long.MAX_VALUE;
+ }
+ // using end time, construct a first key that will be seeked to
+ byte[] revts = writeReverseOrderedLong(endtime);
+ kb.add(revts);
+ byte[] first = kb.getBytesForLookup();
+ byte[] last = null;
+ if (starttime != null) {
+ // if start time is not null, set a last key that will not be
+ // iterated past
+ last = KeyBuilder.newInstance().add(base).add(entityType)
+ .add(writeReverseOrderedLong(starttime)).getBytesForLookup();
+ }
+ if (limit == null) {
+ // if limit is not specified, use the default
+ limit = DEFAULT_LIMIT;
+ }
+
+ Entities entities = new Entities();
+ iterator = db.iterator();
+ iterator.seek(first);
+ // iterate until one of the following conditions is met: limit is
+ // reached, there are no more keys, the key prefix no longer matches,
+ // or a start time has been specified and reached/exceeded
+ while (entities.getEntities().size() < limit && iterator.hasNext()) {
+ byte[] key = iterator.peekNext().getKey();
+ if (!prefixMatches(prefix, prefix.length, key) || (last != null &&
+ WritableComparator.compareBytes(key, 0, key.length, last, 0,
+ last.length) > 0))
+ break;
+ // read the start time and entityId from the current key
+ KeyParser kp = new KeyParser(key, prefix.length);
+ Long startTime = kp.getNextLong();
+ String entityId = kp.getNextString();
+ // parse the entity that owns this key, iterating over all keys for
+ // the entity
+ Entity entity = getEntity(entityId, entityType, startTime,
+ fields, iterator, key, kp.getOffset());
+ if (entity == null)
+ continue;
+ // determine if the retrieved entity matches the provided secondary
+ // filters, and if so add it to the list of entities to return
+ boolean filterPassed = true;
+ if (secondaryFilters != null) {
+ for (NameValuePair filter : secondaryFilters) {
+ Object v = entity.getOtherInfo().get(filter.getName());
+ if (v == null) {
+ Set<Object> vs = entity.getPrimaryFilters()
+ .get(filter.getName());
+ if (vs != null && !vs.contains(filter.getValue())) {
+ filterPassed = false;
+ break;
+ }
+ } else if (!v.equals(filter.getValue())) {
+ filterPassed = false;
+ break;
+ }
+ }
+ }
+ if (filterPassed)
+ entities.addEntity(entity);
+ }
+ return entities;
+ } finally {
+ IOUtils.cleanup(LOG, iterator);
+ }
+ }
+
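For illustration only (not part of the patch): because start times are stored reverse-ordered, the window bounds in getEntityByTime() map to scan bounds in the opposite direction, with endtime giving the first key and starttime giving the last. A tiny numeric sketch, using hypothetical timestamps:

byte[] newest = GenericObjectMapper.writeReverseOrderedLong(200L); // endtime
byte[] middle = GenericObjectMapper.writeReverseOrderedLong(150L);
byte[] oldest = GenericObjectMapper.writeReverseOrderedLong(100L); // starttime
// Lexicographically: newest < middle < oldest. Seeking to prefix + newest and
// stopping once a key compares greater than prefix + oldest therefore visits
// entities with start times 200, 150, ... (newest first), i.e. the half-open
// window (starttime, endtime] described in the javadoc above.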
+ /**
+ * Put a single entity. If there is an error, add a PutError to the given
+ * response.
+ */
+ private void put(Entity entity, PutErrors response) {
+ WriteBatch writeBatch = null;
+ try {
+ writeBatch = db.createWriteBatch();
+ List<Event> events = entity.getEvents();
+ // look up the start time for the entity
+ byte[] revStartTime = getStartTime(entity.getEntityId(),
+ entity.getEntityType(), entity.getStartTime(), events,
+ writeBatch);
+ if (revStartTime == null) {
+ // if no start time is found, add an error and return
+ PutError error = new PutError();
+ error.setEntityId(entity.getEntityId());
+ error.setEntityType(entity.getEntityType());
+ error.setErrorCode(PutError.NO_START_TIME);
+ response.addError(error);
+ return;
+ }
+ Long revStartTimeLong = readReverseOrderedLong(revStartTime, 0);
+ Map<String, Set<Object>> primaryFilters = entity.getPrimaryFilters();
+
+ // write event entries
+ if (events != null && !events.isEmpty()) {
+ for (Event event : events) {
+ byte[] revts = writeReverseOrderedLong(event.getTimestamp());
+ byte[] key = createEntityEventKey(entity.getEntityId(),
+ entity.getEntityType(), revStartTime, revts,
+ event.getEventType());
+ byte[] value = GenericObjectMapper.write(event.getEventInfo());
+ writeBatch.put(key, value);
+ writePrimaryFilterEntries(writeBatch, primaryFilters, key, value);
+ }
+ }
+
+ // write related entity entries
+ Map<String, Set<String>> relatedEntities =
+ entity.getRelatedEntities();
+ if (relatedEntities != null && !relatedEntities.isEmpty()) {
+ for (Entry<String, Set<String>> relatedEntityList :
+ relatedEntities.entrySet()) {
+ String relatedEntityType = relatedEntityList.getKey();
+ for (String relatedEntityId : relatedEntityList.getValue()) {
+ // look up start time of related entity
+ byte[] relatedEntityStartTime = getStartTime(relatedEntityId,
+ relatedEntityType, null, null, writeBatch);
+ if (relatedEntityStartTime == null) {
+ // if start time is not found, set start time of the related
+ // entity to the start time of this entity, and write it to the
+ // db and the cache
+ relatedEntityStartTime = revStartTime;
+ writeBatch.put(createStartTimeLookupKey(relatedEntityId,
+ relatedEntityType), relatedEntityStartTime);
+ startTimeCache.put(new EntityIdentifier(relatedEntityId,
+ relatedEntityType), revStartTimeLong);
+ }
+ // write reverse entry (related entity -> entity)
+ byte[] key = createReleatedEntityKey(relatedEntityId,
+ relatedEntityType, relatedEntityStartTime,
+ entity.getEntityId(), entity.getEntityType());
+ writeBatch.put(key, EMPTY_BYTES);
+ // TODO: write forward entry (entity -> related entity)?
+ }
+ }
+ }
+
+ // write primary filter entries
+ if (primaryFilters != null && !primaryFilters.isEmpty()) {
+ for (Entry<String, Set<Object>> primaryFilter :
+ primaryFilters.entrySet()) {
+ for (Object primaryFilterValue : primaryFilter.getValue()) {
+ byte[] key = createPrimaryFilterKey(entity.getEntityId(),
+ entity.getEntityType(), revStartTime,
+ primaryFilter.getKey(), primaryFilterValue);
+ writeBatch.put(key, EMPTY_BYTES);
+ writePrimaryFilterEntries(writeBatch, primaryFilters, key,
+ EMPTY_BYTES);
+ }
+ }
+ }
+
+ // write other info entries
+ Map<String, Object> otherInfo = entity.getOtherInfo();
+ if (otherInfo != null && !otherInfo.isEmpty()) {
+ for (Entry<String, Object> i : otherInfo.entrySet()) {
+ byte[] key = createOtherInfoKey(entity.getEntityId(),
+ entity.getEntityType(), revStartTime, i.getKey());
+ byte[] value = GenericObjectMapper.write(i.getValue());
+ writeBatch.put(key, value);
+ writePrimaryFilterEntries(writeBatch, primaryFilters, key, value);
+ }
+ }
+ db.write(writeBatch);
+ } catch (IOException e) {
+ LOG.error("Error putting entity " + entity.getEntityId() +
+ " of type " + entity.getEntityType(), e);
+ PutError error = new PutError();
+ error.setEntityId(entity.getEntityId());
+ error.setEntityType(entity.getEntityType());
+ error.setErrorCode(PutError.IO_EXCEPTION);
+ response.addError(error);
+ } finally {
+ IOUtils.cleanup(LOG, writeBatch);
+ }
+ }
+
+ /**
+ * For a given key / value pair that has been written to the db,
+ * write additional entries to the db for each primary filter.
+ */
+ private static void writePrimaryFilterEntries(WriteBatch writeBatch,
+ Map<String, Set<Object>> primaryFilters, byte[] key, byte[] value)
+ throws IOException {
+ if (primaryFilters != null && !primaryFilters.isEmpty()) {
+ for (Entry<String, Set<Object>> pf : primaryFilters.entrySet()) {
+ for (Object pfval : pf.getValue()) {
+ writeBatch.put(addPrimaryFilterToKey(pf.getKey(), pfval,
+ key), value);
+ }
+ }
+ }
+ }
+
+ @Override
+ public PutErrors put(Entities entities) {
+ PutErrors response = new PutErrors();
+ for (Entity entity : entities.getEntities()) {
+ put(entity, response);
+ }
+ return response;
+ }
+
+ /**
+ * Get the unique start time for a given entity as a byte array that sorts
+ * the timestamps in reverse order (see {@link
+ * GenericObjectMapper#writeReverseOrderedLong(long)}).
+ *
+ * @param entityId The id of the entity
+ * @param entityType The type of the entity
+ * @param startTime The start time of the entity, or null
+ * @param events A list of events for the entity, or null
+ * @param writeBatch A leveldb write batch, if the method is called by a
+ * put as opposed to a get
+ * @return A byte array
+ * @throws IOException
+ */
+ private byte[] getStartTime(String entityId, String entityType,
+ Long startTime, List<Event> events, WriteBatch writeBatch)
+ throws IOException {
+ EntityIdentifier entity = new EntityIdentifier(entityId, entityType);
+ if (startTime == null) {
+ // start time is not provided, so try to look it up
+ if (startTimeCache.containsKey(entity)) {
+ // found the start time in the cache
+ startTime = startTimeCache.get(entity);
+ } else {
+ // try to look up the start time in the db
+ byte[] b = createStartTimeLookupKey(entity.getId(), entity.getType());
+ byte[] v = db.get(b);
+ if (v == null) {
+ // did not find the start time in the db
+ // if this is a put, try to set it from the provided events
+ if (events == null || writeBatch == null) {
+ // no events, or not a put, so return null
+ return null;
+ }
+ Long min = Long.MAX_VALUE;
+ for (Event e : events)
+ if (min > e.getTimestamp())
+ min = e.getTimestamp();
+ startTime = min;
+ // selected start time as minimum timestamp of provided events
+ // write start time to db and cache
+ writeBatch.put(b, writeReverseOrderedLong(startTime));
+ startTimeCache.put(entity, startTime);
+ } else {
+ // found the start time in the db
+ startTime = readReverseOrderedLong(v, 0);
+ if (writeBatch != null) {
+ // if this is a put, re-add the start time to the cache
+ startTimeCache.put(entity, startTime);
+ }
+ }
+ }
+ } else {
+ // start time is provided
+ // TODO: verify start time in db as well as cache?
+ if (startTimeCache.containsKey(entity)) {
+ // if the start time is already in the cache,
+ // and it is different from the provided start time,
+ // use the one from the cache
+ if (!startTime.equals(startTimeCache.get(entity)))
+ startTime = startTimeCache.get(entity);
+ } else if (writeBatch != null) {
+ // if this is a put, write the provided start time to the db and the
+ // cache
+ byte[] b = createStartTimeLookupKey(entity.getId(), entity.getType());
+ writeBatch.put(b, writeReverseOrderedLong(startTime));
+ startTimeCache.put(entity, startTime);
+ }
+ }
+ return writeReverseOrderedLong(startTime);
+ }
+
+ /**
+ * Creates a key for looking up the start time of a given entity,
+ * of the form START_TIME_LOOKUP_PREFIX + entitytype + entity.
+ */
+ private static byte[] createStartTimeLookupKey(String entity,
+ String entitytype) throws IOException {
+ return KeyBuilder.newInstance().add(START_TIME_LOOKUP_PREFIX)
+ .add(entitytype).add(entity).getBytes();
+ }
+
+ /**
+ * Creates an index entry for the given key of the form
+ * INDEXED_ENTRY_PREFIX + primaryfiltername + primaryfiltervalue + key.
+ */
+ private static byte[] addPrimaryFilterToKey(String primaryFilterName,
+ Object primaryFilterValue, byte[] key) throws IOException {
+ return KeyBuilder.newInstance().add(INDEXED_ENTRY_PREFIX)
+ .add(primaryFilterName)
+ .add(GenericObjectMapper.write(primaryFilterValue), true).add(key)
+ .getBytes();
+ }
+
+ /**
+ * Creates an event key, serializing ENTITY_ENTRY_PREFIX + entitytype +
+ * revstarttime + entity + TIME_COLUMN + reveventtimestamp + eventtype.
+ */
+ private static byte[] createEntityEventKey(String entity, String entitytype,
+ byte[] revStartTime, byte[] reveventtimestamp, String eventtype)
+ throws IOException {
+ return KeyBuilder.newInstance().add(ENTITY_ENTRY_PREFIX)
+ .add(entitytype).add(revStartTime).add(entity).add(TIME_COLUMN)
+ .add(reveventtimestamp).add(eventtype).getBytes();
+ }
+
+ /**
+ * Creates an event object from the given key, offset, and value. If the
+ * event type is not contained in the specified set of event types,
+ * returns null.
+ */
+ private static Event getEntityEvent(Set<String> eventTypes, byte[] key,
+ int offset, byte[] value) throws IOException {
+ KeyParser kp = new KeyParser(key, offset);
+ long ts = kp.getNextLong();
+ String tstype = kp.getNextString();
+ if (eventTypes == null || eventTypes.contains(tstype)) {
+ Event event = new Event();
+ event.setTimestamp(ts);
+ event.setEventType(tstype);
+ Object o = GenericObjectMapper.read(value);
+ if (o == null) {
+ event.setEventInfo(null);
+ } else if (o instanceof Map) {
+ @SuppressWarnings("unchecked")
+ Map<String, Object> m = (Map<String, Object>) o;
+ event.setEventInfo(m);
+ } else {
+ throw new IOException("Couldn't deserialize event info map");
+ }
+ return event;
+ }
+ return null;
+ }
+
+ /**
+ * Creates a primary filter key, serializing ENTITY_ENTRY_PREFIX +
+ * entitytype + revstarttime + entity + PRIMARY_FILTER_COLUMN + name + value.
+ */
+ private static byte[] createPrimaryFilterKey(String entity,
+ String entitytype, byte[] revStartTime, String name, Object value)
+ throws IOException {
+ return KeyBuilder.newInstance().add(ENTITY_ENTRY_PREFIX).add(entitytype)
+ .add(revStartTime).add(entity).add(PRIMARY_FILTER_COLUMN).add(name)
+ .add(GenericObjectMapper.write(value)).getBytes();
+ }
+
+ /**
+ * Parses the primary filter from the given key at the given offset and
+ * adds it to the given entity.
+ */
+ private static void addPrimaryFilter(Entity entity, byte[] key,
+ int offset) throws IOException {
+ KeyParser kp = new KeyParser(key, offset);
+ String name = kp.getNextString();
+ Object value = GenericObjectMapper.read(key, kp.getOffset());
+ entity.addPrimaryFilter(name, value);
+ }
+
+ /**
+ * Creates an other info key, serializing ENTITY_ENTRY_PREFIX + entitytype +
+ * revstarttime + entity + OTHER_INFO_COLUMN + name.
+ */
+ private static byte[] createOtherInfoKey(String entity, String entitytype,
+ byte[] revStartTime, String name) throws IOException {
+ return KeyBuilder.newInstance().add(ENTITY_ENTRY_PREFIX).add(entitytype)
+ .add(revStartTime).add(entity).add(OTHER_INFO_COLUMN).add(name)
+ .getBytes();
+ }
+
+ /**
+ * Creates a string representation of the byte array from the given offset
+ * to the end of the array (for parsing other info keys).
+ */
+ private static String parseRemainingKey(byte[] b, int offset) {
+ return new String(b, offset, b.length - offset);
+ }
+
+ /**
+ * Creates a related entity key, serializing ENTITY_ENTRY_PREFIX +
+ * entitytype + revstarttime + entity + RELATED_COLUMN + relatedentitytype +
+ * relatedentity.
+ */
+ private static byte[] createReleatedEntityKey(String entity,
+ String entitytype, byte[] revStartTime, String relatedEntity,
+ String relatedEntityType) throws IOException {
+ return KeyBuilder.newInstance().add(ENTITY_ENTRY_PREFIX).add(entitytype)
+ .add(revStartTime).add(entity).add(RELATED_COLUMN)
+ .add(relatedEntityType).add(relatedEntity).getBytes();
+ }
+
+ /**
+ * Parses the related entity from the given key at the given offset and
+ * adds it to the given entity.
+ */
+ private static void addRelatedEntity(Entity entity, byte[] key,
+ int offset) throws IOException {
+ KeyParser kp = new KeyParser(key, offset);
+ String type = kp.getNextString();
+ String id = kp.getNextString();
+ entity.addRelatedEntity(type, id);
+ }
+
+ /**
+ * Clears the cache to test reloading start times from leveldb (only for
+ * testing).
+ */
+ @VisibleForTesting
+ void clearStartTimeCache() {
+ startTimeCache.clear();
+ }
+}
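
The leveldb store above keys each entity under the reverse-ordered form of its start time, so that a plain ascending scan of the database yields the most recently started entities first. The exact byte layout of GenericObjectMapper#writeReverseOrderedLong is not shown in this patch; the self-contained sketch below only illustrates the ordering property such an encoding has to provide.

    public class ReverseOrderedLongSketch {

      // Encode a signed long into 8 bytes whose unsigned lexicographic order
      // is the reverse of the numeric order: flip the sign bit so unsigned
      // comparison matches signed order, then complement to reverse it.
      static byte[] writeReverseOrderedLong(long l) {
        long v = ~(l ^ Long.MIN_VALUE);
        byte[] b = new byte[8];
        for (int i = 0; i < 8; i++) {
          b[i] = (byte) (v >>> (56 - 8 * i));
        }
        return b;
      }

      // Unsigned lexicographic comparison, i.e. what leveldb's default
      // comparator does with these keys.
      static int compareBytes(byte[] x, byte[] y) {
        for (int i = 0; i < 8; i++) {
          int d = (x[i] & 0xff) - (y[i] & 0xff);
          if (d != 0) {
            return d;
          }
        }
        return 0;
      }

      public static void main(String[] args) {
        long older = 123L, newer = 456L;
        // The newer timestamp encodes to the smaller key, so an ascending
        // scan returns it first.
        System.out.println(
            compareBytes(writeReverseOrderedLong(newer),
                         writeReverseOrderedLong(older)) < 0);  // true
      }
    }
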
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/MemoryTimelineStore.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/MemoryTimelineStore.java
new file mode 100644
index 0000000..eb15480
--- /dev/null
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/MemoryTimelineStore.java
@@ -0,0 +1,306 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.PriorityQueue;
+import java.util.Set;
+import java.util.SortedSet;
+
+import org.apache.hadoop.classification.InterfaceAudience.Private;
+import org.apache.hadoop.classification.InterfaceStability.Unstable;
+import org.apache.hadoop.service.AbstractService;
+import org.apache.hadoop.yarn.api.records.timeline.Entities;
+import org.apache.hadoop.yarn.api.records.timeline.Entity;
+import org.apache.hadoop.yarn.api.records.timeline.Event;
+import org.apache.hadoop.yarn.api.records.timeline.Events;
+import org.apache.hadoop.yarn.api.records.timeline.PutErrors;
+import org.apache.hadoop.yarn.api.records.timeline.Events.EventsOfOneEntity;
+import org.apache.hadoop.yarn.api.records.timeline.PutErrors.PutError;
+
+/**
+ * In-memory implementation of {@link TimelineStore}. This implementation is
+ * intended for testing only; if it is instantiated more than once, reads and
+ * writes may go to different in-memory stores.
+ *
+ */
+@Private
+@Unstable
+public class MemoryTimelineStore
+ extends AbstractService implements TimelineStore {
+
+ private Map<EntityIdentifier, Entity> entities =
+ new HashMap<EntityIdentifier, Entity>();
+
+ public MemoryTimelineStore() {
+ super(MemoryTimelineStore.class.getName());
+ }
+
+ @Override
+ public Entities getEntities(String entityType, Long limit,
+ Long windowStart, Long windowEnd, NameValuePair primaryFilter,
+ Collection<NameValuePair> secondaryFilters, EnumSet<Field> fields) {
+ if (limit == null) {
+ limit = DEFAULT_LIMIT;
+ }
+ if (windowStart == null) {
+ windowStart = Long.MIN_VALUE;
+ }
+ if (windowEnd == null) {
+ windowEnd = Long.MAX_VALUE;
+ }
+ if (fields == null) {
+ fields = EnumSet.allOf(Field.class);
+ }
+ List<Entity> entitiesSelected = new ArrayList<Entity>();
+ for (Entity entity : new PriorityQueue<Entity>(entities.values())) {
+ if (entitiesSelected.size() >= limit) {
+ break;
+ }
+ if (!entity.getEntityType().equals(entityType)) {
+ continue;
+ }
+ if (entity.getStartTime() <= windowStart) {
+ continue;
+ }
+ if (entity.getStartTime() > windowEnd) {
+ continue;
+ }
+ if (primaryFilter != null &&
+ !matchPrimaryFilter(entity.getPrimaryFilters(), primaryFilter)) {
+ continue;
+ }
+ if (secondaryFilters != null) { // OR logic
+ boolean flag = false;
+ for (NameValuePair secondaryFilter : secondaryFilters) {
+ if (secondaryFilter != null &&
+ matchFilter(entity.getOtherInfo(), secondaryFilter)) {
+ flag = true;
+ break;
+ }
+ }
+ if (!flag) {
+ continue;
+ }
+ }
+ entitiesSelected.add(entity);
+ }
+ List<Entity> entitiesToReturn = new ArrayList<Entity>();
+ for (Entity entitySelected : entitiesSelected) {
+ entitiesToReturn.add(maskFields(entitySelected, fields));
+ }
+ Collections.sort(entitiesToReturn);
+ Entities entitiesWrapper = new Entities();
+ entitiesWrapper.setEntities(entitiesToReturn);
+ return entitiesWrapper;
+ }
+
+ @Override
+ public Entity getEntity(String entityId, String entityType,
+ EnumSet<Field> fieldsToRetrieve) {
+ if (fieldsToRetrieve == null) {
+ fieldsToRetrieve = EnumSet.allOf(Field.class);
+ }
+ Entity entity = entities.get(new EntityIdentifier(entityId, entityType));
+ if (entity == null) {
+ return null;
+ } else {
+ return maskFields(entity, fieldsToRetrieve);
+ }
+ }
+
+ @Override
+ public Events getEntityTimelines(String entityType,
+ SortedSet<String> entityIds, Long limit, Long windowStart,
+ Long windowEnd,
+ Set<String> eventTypes) {
+ Events allEvents = new Events();
+ if (entityIds == null) {
+ return allEvents;
+ }
+ if (limit == null) {
+ limit = DEFAULT_LIMIT;
+ }
+ if (windowStart == null) {
+ windowStart = Long.MIN_VALUE;
+ }
+ if (windowEnd == null) {
+ windowEnd = Long.MAX_VALUE;
+ }
+ for (String entityId : entityIds) {
+ EntityIdentifier entityID = new EntityIdentifier(entityId, entityType);
+ Entity entity = entities.get(entityID);
+ if (entity == null) {
+ continue;
+ }
+ EventsOfOneEntity events = new EventsOfOneEntity();
+ events.setEntityId(entityId);
+ events.setEntityType(entityType);
+ for (Event event : entity.getEvents()) {
+ if (events.getEvents().size() >= limit) {
+ break;
+ }
+ if (event.getTimestamp() <= windowStart) {
+ continue;
+ }
+ if (event.getTimestamp() > windowEnd) {
+ continue;
+ }
+ if (eventTypes != null && !eventTypes.contains(event.getEventType())) {
+ continue;
+ }
+ events.addEvent(event);
+ }
+ allEvents.addEvent(events);
+ }
+ return allEvents;
+ }
+
+ @Override
+ public PutErrors put(Entities data) {
+ PutErrors errors = new PutErrors();
+ for (Entity entity : data.getEntities()) {
+ EntityIdentifier entityId =
+ new EntityIdentifier(entity.getEntityId(), entity.getEntityType());
+ // store entity info in memory
+ Entity existingEntity = entities.get(entityId);
+ if (existingEntity == null) {
+ existingEntity = new Entity();
+ existingEntity.setEntityId(entity.getEntityId());
+ existingEntity.setEntityType(entity.getEntityType());
+ existingEntity.setStartTime(entity.getStartTime());
+ entities.put(entityId, existingEntity);
+ }
+ if (entity.getEvents() != null) {
+ if (existingEntity.getEvents() == null) {
+ existingEntity.setEvents(entity.getEvents());
+ } else {
+ existingEntity.addEvents(entity.getEvents());
+ }
+ Collections.sort(existingEntity.getEvents());
+ }
+ // check startTime
+ if (existingEntity.getStartTime() == null) {
+ if (existingEntity.getEvents() == null
+ || existingEntity.getEvents().isEmpty()) {
+ PutError error = new PutError();
+ error.setEntityId(entityId.getId());
+ error.setEntityType(entityId.getType());
+ error.setErrorCode(PutError.NO_START_TIME);
+ errors.addError(error);
+ entities.remove(entityId);
+ continue;
+ } else {
+ existingEntity.setStartTime(entity.getEvents().get(0).getTimestamp());
+ }
+ }
+ if (entity.getPrimaryFilters() != null) {
+ if (existingEntity.getPrimaryFilters() == null) {
+ existingEntity.setPrimaryFilters(entity.getPrimaryFilters());
+ } else {
+ existingEntity.addPrimaryFilters(entity.getPrimaryFilters());
+ }
+ }
+ if (entity.getOtherInfo() != null) {
+ if (existingEntity.getOtherInfo() == null) {
+ existingEntity.setOtherInfo(entity.getOtherInfo());
+ } else {
+ existingEntity.addOtherInfo(entity.getOtherInfo());
+ }
+ }
+ // relate it to other entities
+ if (entity.getRelatedEntities() == null) {
+ continue;
+ }
+ for (Map.Entry<String, List<String>> partRelatedEntities : entity
+ .getRelatedEntities().entrySet()) {
+ if (partRelatedEntities == null) {
+ continue;
+ }
+ for (String idStr : partRelatedEntities.getValue()) {
+ EntityIdentifier relatedEntityId =
+ new EntityIdentifier(idStr, partRelatedEntities.getKey());
+ Entity relatedEntity = entities.get(relatedEntityId);
+ if (relatedEntity != null) {
+ relatedEntity.addRelatedEntity(
+ existingEntity.getEntityType(), existingEntity.getEntityId());
+ } else {
+ relatedEntity = new Entity();
+ relatedEntity.setEntityId(relatedEntityId.getId());
+ relatedEntity.setEntityType(relatedEntityId.getType());
+ relatedEntity.setStartTime(existingEntity.getStartTime());
+ relatedEntity.addRelatedEntity(existingEntity.getEntityType(),
+ existingEntity.getEntityId());
+ entities.put(relatedEntityId, relatedEntity);
+ }
+ }
+ }
+ }
+ return errors;
+ }
+
+ private static Entity maskFields(
+ Entity entity, EnumSet<Field> fields) {
+ // Conceal the fields that are not going to be exposed
+ Entity entityToReturn = new Entity();
+ entityToReturn.setEntityId(entity.getEntityId());
+ entityToReturn.setEntityType(entity.getEntityType());
+ entityToReturn.setStartTime(entity.getStartTime());
+ entityToReturn.setEvents(fields.contains(Field.EVENTS) ?
+ entity.getEvents() : fields.contains(Field.LAST_EVENT_ONLY) ?
+ Arrays.asList(entity.getEvents().get(0)) : null);
+ entityToReturn.setRelatedEntities(fields.contains(Field.RELATED_ENTITIES) ?
+ entity.getRelatedEntities() : null);
+ entityToReturn.setPrimaryFilters(fields.contains(Field.PRIMARY_FILTERS) ?
+ entity.getPrimaryFilters() : null);
+ entityToReturn.setOtherInfo(fields.contains(Field.OTHER_INFO) ?
+ entity.getOtherInfo() : null);
+ return entityToReturn;
+ }
+
+ private static boolean matchFilter(Map<String, Object> tags,
+ NameValuePair filter) {
+ Object value = tags.get(filter.getName());
+ if (value == null) { // doesn't have the filter
+ return false;
+ } else if (!value.equals(filter.getValue())) { // doesn't match the filter
+ return false;
+ }
+ return true;
+ }
+
+ private static boolean matchPrimaryFilter(Map<String, Set<Object>> tags,
+ NameValuePair filter) {
+ Set<Object> value = tags.get(filter.getName());
+ if (value == null) { // doesn't have the filter
+ return false;
+ } else {
+ return value.contains(filter.getValue());
+ }
+ }
+
+}
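
A short usage sketch of the in-memory store, based on the API visible in this patch. The Configuration argument comes from AbstractService#init, and PutErrors#getErrors() is assumed to exist by analogy with the pre-rename ATSPutErrors record.

    import java.util.Collections;
    import java.util.EnumSet;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.yarn.api.records.timeline.Entities;
    import org.apache.hadoop.yarn.api.records.timeline.Entity;
    import org.apache.hadoop.yarn.api.records.timeline.Event;
    import org.apache.hadoop.yarn.api.records.timeline.PutErrors;
    import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.MemoryTimelineStore;
    import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TimelineReader.Field;

    public class MemoryTimelineStoreExample {
      public static void main(String[] args) throws Exception {
        MemoryTimelineStore store = new MemoryTimelineStore();
        store.init(new Configuration());
        store.start();

        // One entity with a single event; the event timestamp doubles as the
        // start time because none is set explicitly.
        Event event = new Event();
        event.setEventType("start_event");
        event.setTimestamp(System.currentTimeMillis());

        Entity entity = new Entity();
        entity.setEntityId("app_1");
        entity.setEntityType("type_1");
        entity.setEvents(Collections.singletonList(event));

        Entities toPut = new Entities();
        toPut.setEntities(Collections.singletonList(entity));
        PutErrors errors = store.put(toPut);
        // getErrors() is assumed to mirror the old ATSPutErrors accessor.
        System.out.println("put errors: " + errors.getErrors().size());

        Entity stored =
            store.getEntity("app_1", "type_1", EnumSet.allOf(Field.class));
        System.out.println("start time: " + stored.getStartTime());

        store.stop();
      }
    }
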
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/NameValuePair.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/NameValuePair.java
new file mode 100644
index 0000000..d8dabd2
--- /dev/null
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/NameValuePair.java
@@ -0,0 +1,59 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+/**
+ * A class holding a name and value pair, used for specifying filters in
+ * {@link TimelineReader}.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Unstable
+public class NameValuePair {
+ String name;
+ Object value;
+
+ public NameValuePair(String name, Object value) {
+ this.name = name;
+ this.value = value;
+ }
+
+ /**
+ * Get the name.
+ * @return The name.
+ */
+ public String getName() {
+
+ return name;
+ }
+
+ /**
+ * Get the value.
+ * @return The value.
+ */
+ public Object getValue() {
+ return value;
+ }
+
+ @Override
+ public String toString() {
+ return "{ name: " + name + ", value: " + value + " }";
+ }
+}
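
For illustration, a primary filter is a single indexed name/value pair, while secondary filters are a collection of pairs checked at scan time; a minimal sketch (the filter names and values are made up):

    import java.util.Arrays;
    import java.util.Collection;

    import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.NameValuePair;

    public class FilterExample {
      public static void main(String[] args) {
        // Indexed lookup: only entities tagged with this primary filter.
        NameValuePair primaryFilter = new NameValuePair("user", "someuser");

        // Scan-time filters matched against an entity's primary filters or
        // other info; see TimelineReader#getEntities for the exact semantics.
        Collection<NameValuePair> secondaryFilters = Arrays.asList(
            new NameValuePair("status", "RUNNING"),
            new NameValuePair("queue", "default"));

        System.out.println(primaryFilter + " " + secondaryFilters);
      }
    }
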
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TimelineReader.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TimelineReader.java
new file mode 100644
index 0000000..1838a66
--- /dev/null
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TimelineReader.java
@@ -0,0 +1,141 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline;
+
+import java.io.IOException;
+import java.util.Collection;
+import java.util.EnumSet;
+import java.util.Set;
+import java.util.SortedSet;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.yarn.api.records.timeline.Entities;
+import org.apache.hadoop.yarn.api.records.timeline.Entity;
+import org.apache.hadoop.yarn.api.records.timeline.Events;
+
+/**
+ * This interface is for retrieving timeline information.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Unstable
+public interface TimelineReader {
+
+ /**
+ * Possible fields to retrieve for {@link #getEntities} and
+ * {@link #getEntity}.
+ */
+ enum Field {
+ EVENTS,
+ RELATED_ENTITIES,
+ PRIMARY_FILTERS,
+ OTHER_INFO,
+ LAST_EVENT_ONLY
+ }
+
+ /**
+ * Default limit for {@link #getEntities} and {@link #getEntityTimelines}.
+ */
+ final long DEFAULT_LIMIT = 100;
+
+ /**
+ * This method retrieves a list of entity information, {@link Entity}, sorted
+ * by the starting timestamp for the entity, descending.
+ *
+ * @param entityType
+ * The type of entities to return (required).
+ * @param limit
+ * A limit on the number of entities to return. If null, defaults to
+ * {@link #DEFAULT_LIMIT}.
+ * @param windowStart
+ * The earliest start timestamp to retrieve (exclusive). If null,
+ * defaults to retrieving all entities until the limit is reached.
+ * @param windowEnd
+ * The latest start timestamp to retrieve (inclusive). If null,
+ * defaults to {@link Long#MAX_VALUE}
+ * @param primaryFilter
+ * Retrieves only entities that have the specified primary filter. If
+ * null, retrieves all entities. This is an indexed retrieval, so
+ * entities that do not match the filter are not scanned.
+ * @param secondaryFilters
+ * Retrieves only entities that have exact matches for all the
+ * specified filters in their primary filters or other info. This is
+ * not an indexed retrieval, so all entities are scanned but only
+ * those matching the filters are returned.
+ * @param fieldsToRetrieve
+ * Specifies which fields of the entity object to retrieve (see
+ * {@link Field}). If the set of fields contains
+ * {@link Field#LAST_EVENT_ONLY} and not {@link Field#EVENTS}, the
+ * most recent event for each entity is retrieved. If null, retrieves
+ * all fields.
+ * @return An {@link Entities} object.
+ * @throws IOException
+ */
+ Entities getEntities(String entityType,
+ Long limit, Long windowStart, Long windowEnd,
+ NameValuePair primaryFilter, Collection<NameValuePair> secondaryFilters,
+ EnumSet<Field> fieldsToRetrieve) throws IOException;
+
+ /**
+ * This method retrieves the entity information for a given entity.
+ *
+ * @param entityId
+ * The entity whose information will be retrieved.
+ * @param entityType
+ * The type of the entity.
+ * @param fieldsToRetrieve
+ * Specifies which fields of the entity object to retrieve (see
+ * {@link Field}). If the set of fields contains
+ * {@link Field#LAST_EVENT_ONLY} and not {@link Field#EVENTS}, the
+ * most recent event for each entity is retrieved. If null, retrieves
+ * all fields.
+ * @return An {@link Entity} object.
+ * @throws IOException
+ */
+ Entity getEntity(String entityId, String entityType, EnumSet<Field>
+ fieldsToRetrieve) throws IOException;
+
+ /**
+ * This method retrieves the events for a list of entities all of the same
+ * entity type. The events for each entity are sorted in order of their
+ * timestamps, descending.
+ *
+ * @param entityType
+ * The type of entities to retrieve events for.
+ * @param entityIds
+ * The entity IDs to retrieve events for.
+ * @param limit
+ * A limit on the number of events to return for each entity. If
+ * null, defaults to {@link #DEFAULT_LIMIT} events per entity.
+ * @param windowStart
+ * If not null, retrieves only events later than the given time
+ * (exclusive)
+ * @param windowEnd
+ * If not null, retrieves only events earlier than the given time
+ * (inclusive)
+ * @param eventTypes
+ * Restricts the events returned to the given types. If null, events
+ * of all types will be returned.
+ * @return An {@link Events} object.
+ * @throws IOException
+ */
+ Events getEntityTimelines(String entityType,
+ SortedSet<String> entityIds, Long limit, Long windowStart,
+ Long windowEnd, Set<String> eventTypes) throws IOException;
+}
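
A sketch of a read through this interface using the parameters documented above; the entity type, filter values, and time window are illustrative only.

    import java.io.IOException;
    import java.util.EnumSet;

    import org.apache.hadoop.yarn.api.records.timeline.Entities;
    import org.apache.hadoop.yarn.api.records.timeline.Entity;
    import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.NameValuePair;
    import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TimelineReader;
    import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TimelineReader.Field;

    public class ReaderExample {
      static void printRecentEntities(TimelineReader reader) throws IOException {
        // Up to 10 entities of type "type_1" started within the last hour that
        // carry the primary filter user=someuser; only events and primary
        // filters are materialized on the returned objects.
        long now = System.currentTimeMillis();
        Entities result = reader.getEntities(
            "type_1",
            10L,                                   // limit
            now - 60 * 60 * 1000L,                 // windowStart (exclusive)
            now,                                   // windowEnd (inclusive)
            new NameValuePair("user", "someuser"), // primaryFilter
            null,                                  // secondaryFilters
            EnumSet.of(Field.EVENTS, Field.PRIMARY_FILTERS));
        for (Entity entity : result.getEntities()) {
          System.out.println(entity.getEntityId() + " @ " + entity.getStartTime());
        }
      }
    }
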
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TimelineStore.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TimelineStore.java
new file mode 100644
index 0000000..6b50d83
--- /dev/null
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TimelineStore.java
@@ -0,0 +1,29 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.service.Service;
+
+@InterfaceAudience.Private
+@InterfaceStability.Unstable
+public interface TimelineStore extends
+ Service, TimelineReader, TimelineWriter {
+}
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TimelineWriter.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TimelineWriter.java
new file mode 100644
index 0000000..6ea3f0b
--- /dev/null
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TimelineWriter.java
@@ -0,0 +1,46 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.yarn.api.records.timeline.Entities;
+import org.apache.hadoop.yarn.api.records.timeline.PutErrors;
+
+import java.io.IOException;
+
+/**
+ * This interface is for storing timeline information.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Unstable
+public interface TimelineWriter {
+
+ /**
+ * Stores entity information to the timeline store. Any errors occurring for
+ * individual put request objects will be reported in the response.
+ *
+ * @param data
+ * An {@link Entities} object.
+ * @return A {@link PutErrors} object.
+ * @throws IOException
+ */
+ PutErrors put(Entities data) throws IOException;
+
+}
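
A sketch of handling the per-entity errors returned by put(); the PutError getters used here are assumed to mirror the setters that appear elsewhere in this patch.

    import java.io.IOException;

    import org.apache.hadoop.yarn.api.records.timeline.Entities;
    import org.apache.hadoop.yarn.api.records.timeline.PutErrors;
    import org.apache.hadoop.yarn.api.records.timeline.PutErrors.PutError;
    import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TimelineWriter;

    public class WriterExample {
      static void putAndReport(TimelineWriter writer, Entities data)
          throws IOException {
        PutErrors response = writer.put(data);
        // Getters assumed to mirror the setters used in this patch.
        for (PutError error : response.getErrors()) {
          // NO_START_TIME: the entity had neither a start time nor any event;
          // IO_EXCEPTION: the backing store failed to persist the entity.
          System.err.println("put failed for " + error.getEntityId() + "/"
              + error.getEntityType() + ", code " + error.getErrorCode());
        }
      }
    }
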
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/package-info.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/package-info.java
new file mode 100644
index 0000000..970e868
--- /dev/null
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/package-info.java
@@ -0,0 +1,20 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+@InterfaceAudience.Private
+package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline;
+import org.apache.hadoop.classification.InterfaceAudience;
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSWebApp.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSWebApp.java
index d2cfc32..93065b3 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSWebApp.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSWebApp.java
@@ -21,7 +21,7 @@
import org.apache.hadoop.yarn.server.api.ApplicationContext;
import org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryManager;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.apptimeline.ApplicationTimelineStore;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TimelineStore;
import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
import org.apache.hadoop.yarn.webapp.WebApp;
import org.apache.hadoop.yarn.webapp.YarnJacksonJaxbJsonProvider;
@@ -30,22 +30,22 @@
public class AHSWebApp extends WebApp implements YarnWebParams {
private final ApplicationHistoryManager applicationHistoryManager;
- private final ApplicationTimelineStore applicationTimelineStore;
+ private final TimelineStore timelineStore;
public AHSWebApp(ApplicationHistoryManager applicationHistoryManager,
- ApplicationTimelineStore applicationTimelineStore) {
+ TimelineStore timelineStore) {
this.applicationHistoryManager = applicationHistoryManager;
- this.applicationTimelineStore = applicationTimelineStore;
+ this.timelineStore = timelineStore;
}
@Override
public void setup() {
bind(YarnJacksonJaxbJsonProvider.class);
bind(AHSWebServices.class);
- bind(ATSWebServices.class);
+ bind(TimelineWebServices.class);
bind(GenericExceptionHandler.class);
bind(ApplicationContext.class).toInstance(applicationHistoryManager);
- bind(ApplicationTimelineStore.class).toInstance(applicationTimelineStore);
+ bind(TimelineStore.class).toInstance(timelineStore);
route("/", AHSController.class);
route(pajoin("/apps", APP_STATE), AHSController.class);
route(pajoin("/app", APPLICATION_ID), AHSController.class, "app");
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/ATSWebServices.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/ATSWebServices.java
deleted file mode 100644
index baf00d6..0000000
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/ATSWebServices.java
+++ /dev/null
@@ -1,328 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.server.applicationhistoryservice.webapp;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.EnumSet;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-import java.util.SortedSet;
-import java.util.TreeSet;
-
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-import javax.ws.rs.Consumes;
-import javax.ws.rs.GET;
-import javax.ws.rs.POST;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.Produces;
-import javax.ws.rs.QueryParam;
-import javax.ws.rs.WebApplicationException;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
-import javax.xml.bind.annotation.XmlAccessType;
-import javax.xml.bind.annotation.XmlAccessorType;
-import javax.xml.bind.annotation.XmlElement;
-import javax.xml.bind.annotation.XmlRootElement;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.classification.InterfaceAudience.Public;
-import org.apache.hadoop.classification.InterfaceStability.Unstable;
-import org.apache.hadoop.yarn.api.records.apptimeline.ATSEntities;
-import org.apache.hadoop.yarn.api.records.apptimeline.ATSEntity;
-import org.apache.hadoop.yarn.api.records.apptimeline.ATSEvents;
-import org.apache.hadoop.yarn.api.records.apptimeline.ATSPutErrors;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.apptimeline.ApplicationTimelineReader.Field;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.apptimeline.ApplicationTimelineStore;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.apptimeline.NameValuePair;
-import org.apache.hadoop.yarn.webapp.BadRequestException;
-
-import com.google.inject.Inject;
-import com.google.inject.Singleton;
-
-@Singleton
-@Path("/ws/v1/apptimeline")
-//TODO: support XML serialization/deserialization
-public class ATSWebServices {
-
- private static final Log LOG = LogFactory.getLog(ATSWebServices.class);
-
- private ApplicationTimelineStore store;
-
- @Inject
- public ATSWebServices(ApplicationTimelineStore store) {
- this.store = store;
- }
-
- @XmlRootElement(name = "about")
- @XmlAccessorType(XmlAccessType.NONE)
- @Public
- @Unstable
- public static class AboutInfo {
-
- private String about;
-
- public AboutInfo() {
-
- }
-
- public AboutInfo(String about) {
- this.about = about;
- }
-
- @XmlElement(name = "About")
- public String getAbout() {
- return about;
- }
-
- public void setAbout(String about) {
- this.about = about;
- }
-
- }
-
- /**
- * Return the description of the application timeline web services.
- */
- @GET
- @Produces({ MediaType.APPLICATION_JSON /* , MediaType.APPLICATION_XML */})
- public AboutInfo about(
- @Context HttpServletRequest req,
- @Context HttpServletResponse res) {
- init(res);
- return new AboutInfo("Application Timeline API");
- }
-
- /**
- * Return a list of entities that match the given parameters.
- */
- @GET
- @Path("/{entityType}")
- @Produces({ MediaType.APPLICATION_JSON /* , MediaType.APPLICATION_XML */})
- public ATSEntities getEntities(
- @Context HttpServletRequest req,
- @Context HttpServletResponse res,
- @PathParam("entityType") String entityType,
- @QueryParam("primaryFilter") String primaryFilter,
- @QueryParam("secondaryFilter") String secondaryFilter,
- @QueryParam("windowStart") String windowStart,
- @QueryParam("windowEnd") String windowEnd,
- @QueryParam("limit") String limit,
- @QueryParam("fields") String fields) {
- init(res);
- ATSEntities entities = null;
- try {
- entities = store.getEntities(
- parseStr(entityType),
- parseLongStr(limit),
- parseLongStr(windowStart),
- parseLongStr(windowEnd),
- parsePairStr(primaryFilter, ":"),
- parsePairsStr(secondaryFilter, ",", ":"),
- parseFieldsStr(fields, ","));
- } catch (NumberFormatException e) {
- throw new BadRequestException(
- "windowStart, windowEnd or limit is not a numeric value.");
- } catch (IllegalArgumentException e) {
- throw new BadRequestException("requested invalid field.");
- } catch (IOException e) {
- LOG.error("Error getting entities", e);
- throw new WebApplicationException(e,
- Response.Status.INTERNAL_SERVER_ERROR);
- }
- if (entities == null) {
- return new ATSEntities();
- }
- return entities;
- }
-
- /**
- * Return a single entity of the given entity type and Id.
- */
- @GET
- @Path("/{entityType}/{entityId}")
- @Produces({ MediaType.APPLICATION_JSON /* , MediaType.APPLICATION_XML */})
- public ATSEntity getEntity(
- @Context HttpServletRequest req,
- @Context HttpServletResponse res,
- @PathParam("entityType") String entityType,
- @PathParam("entityId") String entityId,
- @QueryParam("fields") String fields) {
- init(res);
- ATSEntity entity = null;
- try {
- entity =
- store.getEntity(parseStr(entityId), parseStr(entityType),
- parseFieldsStr(fields, ","));
- } catch (IllegalArgumentException e) {
- throw new BadRequestException(
- "requested invalid field.");
- } catch (IOException e) {
- LOG.error("Error getting entity", e);
- throw new WebApplicationException(e,
- Response.Status.INTERNAL_SERVER_ERROR);
- }
- if (entity == null) {
- throw new WebApplicationException(Response.Status.NOT_FOUND);
- }
- return entity;
- }
-
- /**
- * Return the events that match the given parameters.
- */
- @GET
- @Path("/{entityType}/events")
- @Produces({ MediaType.APPLICATION_JSON /* , MediaType.APPLICATION_XML */})
- public ATSEvents getEvents(
- @Context HttpServletRequest req,
- @Context HttpServletResponse res,
- @PathParam("entityType") String entityType,
- @QueryParam("entityId") String entityId,
- @QueryParam("eventType") String eventType,
- @QueryParam("windowStart") String windowStart,
- @QueryParam("windowEnd") String windowEnd,
- @QueryParam("limit") String limit) {
- init(res);
- ATSEvents events = null;
- try {
- events = store.getEntityTimelines(
- parseStr(entityType),
- parseArrayStr(entityId, ","),
- parseLongStr(limit),
- parseLongStr(windowStart),
- parseLongStr(windowEnd),
- parseArrayStr(eventType, ","));
- } catch (NumberFormatException e) {
- throw new BadRequestException(
- "windowStart, windowEnd or limit is not a numeric value.");
- } catch (IOException e) {
- LOG.error("Error getting entity timelines", e);
- throw new WebApplicationException(e,
- Response.Status.INTERNAL_SERVER_ERROR);
- }
- if (events == null) {
- return new ATSEvents();
- }
- return events;
- }
-
- /**
- * Store the given entities into the timeline store, and return the errors
- * that happen during storing.
- */
- @POST
- @Consumes({ MediaType.APPLICATION_JSON /* , MediaType.APPLICATION_XML */})
- public ATSPutErrors postEntities(
- @Context HttpServletRequest req,
- @Context HttpServletResponse res,
- ATSEntities entities) {
- init(res);
- if (entities == null) {
- return new ATSPutErrors();
- }
- try {
- return store.put(entities);
- } catch (IOException e) {
- LOG.error("Error putting entities", e);
- throw new WebApplicationException(e,
- Response.Status.INTERNAL_SERVER_ERROR);
- }
- }
-
- private void init(HttpServletResponse response) {
- response.setContentType(null);
- }
-
- private static SortedSet parseArrayStr(String str, String delimiter) {
- if (str == null) {
- return null;
- }
- SortedSet strSet = new TreeSet();
- String[] strs = str.split(delimiter);
- for (String aStr : strs) {
- strSet.add(aStr.trim());
- }
- return strSet;
- }
-
- private static NameValuePair parsePairStr(String str, String delimiter) {
- if (str == null) {
- return null;
- }
- String[] strs = str.split(delimiter, 2);
- return new NameValuePair(strs[0].trim(), strs[1].trim());
- }
-
- private static Collection parsePairsStr(
- String str, String aDelimiter, String pDelimiter) {
- if (str == null) {
- return null;
- }
- String[] strs = str.split(aDelimiter);
- Set pairs = new HashSet();
- for (String aStr : strs) {
- pairs.add(parsePairStr(aStr, pDelimiter));
- }
- return pairs;
- }
-
- private static EnumSet parseFieldsStr(String str, String delimiter) {
- if (str == null) {
- return null;
- }
- String[] strs = str.split(delimiter);
- List fieldList = new ArrayList();
- for (String s : strs) {
- s = s.trim().toUpperCase();
- if (s.equals("EVENTS"))
- fieldList.add(Field.EVENTS);
- else if (s.equals("LASTEVENTONLY"))
- fieldList.add(Field.LAST_EVENT_ONLY);
- else if (s.equals("RELATEDENTITIES"))
- fieldList.add(Field.RELATED_ENTITIES);
- else if (s.equals("PRIMARYFILTERS"))
- fieldList.add(Field.PRIMARY_FILTERS);
- else if (s.equals("OTHERINFO"))
- fieldList.add(Field.OTHER_INFO);
- }
- if (fieldList.size() == 0)
- return null;
- Field f1 = fieldList.remove(fieldList.size() - 1);
- if (fieldList.size() == 0)
- return EnumSet.of(f1);
- else
- return EnumSet.of(f1, fieldList.toArray(new Field[fieldList.size()]));
- }
-
- private static Long parseLongStr(String str) {
- return str == null ? null : Long.parseLong(str.trim());
- }
-
- private static String parseStr(String str) {
- return str == null ? null : str.trim();
- }
-
-}
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TimelineWebServices.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TimelineWebServices.java
new file mode 100644
index 0000000..013614c
--- /dev/null
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TimelineWebServices.java
@@ -0,0 +1,328 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.server.applicationhistoryservice.webapp;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.EnumSet;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+import java.util.SortedSet;
+import java.util.TreeSet;
+
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import javax.ws.rs.Consumes;
+import javax.ws.rs.GET;
+import javax.ws.rs.POST;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.Produces;
+import javax.ws.rs.QueryParam;
+import javax.ws.rs.WebApplicationException;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlElement;
+import javax.xml.bind.annotation.XmlRootElement;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience.Public;
+import org.apache.hadoop.classification.InterfaceStability.Unstable;
+import org.apache.hadoop.yarn.api.records.timeline.Entities;
+import org.apache.hadoop.yarn.api.records.timeline.Entity;
+import org.apache.hadoop.yarn.api.records.timeline.Events;
+import org.apache.hadoop.yarn.api.records.timeline.PutErrors;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TimelineStore;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.NameValuePair;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TimelineReader.Field;
+import org.apache.hadoop.yarn.webapp.BadRequestException;
+
+import com.google.inject.Inject;
+import com.google.inject.Singleton;
+
+@Singleton
+@Path("/ws/v1/timeline")
+//TODO: support XML serialization/deserialization
+public class TimelineWebServices {
+
+ private static final Log LOG = LogFactory.getLog(TimelineWebServices.class);
+
+ private TimelineStore store;
+
+ @Inject
+ public TimelineWebServices(TimelineStore store) {
+ this.store = store;
+ }
+
+ @XmlRootElement(name = "about")
+ @XmlAccessorType(XmlAccessType.NONE)
+ @Public
+ @Unstable
+ public static class AboutInfo {
+
+ private String about;
+
+ public AboutInfo() {
+
+ }
+
+ public AboutInfo(String about) {
+ this.about = about;
+ }
+
+ @XmlElement(name = "About")
+ public String getAbout() {
+ return about;
+ }
+
+ public void setAbout(String about) {
+ this.about = about;
+ }
+
+ }
+
+ /**
+ * Return the description of the timeline web services.
+ */
+ @GET
+ @Produces({ MediaType.APPLICATION_JSON /* , MediaType.APPLICATION_XML */})
+ public AboutInfo about(
+ @Context HttpServletRequest req,
+ @Context HttpServletResponse res) {
+ init(res);
+ return new AboutInfo("Timeline API");
+ }
+
+ /**
+ * Return a list of entities that match the given parameters.
+ */
+ @GET
+ @Path("/{entityType}")
+ @Produces({ MediaType.APPLICATION_JSON /* , MediaType.APPLICATION_XML */})
+ public Entities getEntities(
+ @Context HttpServletRequest req,
+ @Context HttpServletResponse res,
+ @PathParam("entityType") String entityType,
+ @QueryParam("primaryFilter") String primaryFilter,
+ @QueryParam("secondaryFilter") String secondaryFilter,
+ @QueryParam("windowStart") String windowStart,
+ @QueryParam("windowEnd") String windowEnd,
+ @QueryParam("limit") String limit,
+ @QueryParam("fields") String fields) {
+ init(res);
+ Entities entities = null;
+ try {
+ entities = store.getEntities(
+ parseStr(entityType),
+ parseLongStr(limit),
+ parseLongStr(windowStart),
+ parseLongStr(windowEnd),
+ parsePairStr(primaryFilter, ":"),
+ parsePairsStr(secondaryFilter, ",", ":"),
+ parseFieldsStr(fields, ","));
+ } catch (NumberFormatException e) {
+ throw new BadRequestException(
+ "windowStart, windowEnd or limit is not a numeric value.");
+ } catch (IllegalArgumentException e) {
+ throw new BadRequestException("requested invalid field.");
+ } catch (IOException e) {
+ LOG.error("Error getting entities", e);
+ throw new WebApplicationException(e,
+ Response.Status.INTERNAL_SERVER_ERROR);
+ }
+ if (entities == null) {
+ return new Entities();
+ }
+ return entities;
+ }
+
+ /**
+ * Return a single entity of the given entity type and Id.
+ */
+ @GET
+ @Path("/{entityType}/{entityId}")
+ @Produces({ MediaType.APPLICATION_JSON /* , MediaType.APPLICATION_XML */})
+ public Entity getEntity(
+ @Context HttpServletRequest req,
+ @Context HttpServletResponse res,
+ @PathParam("entityType") String entityType,
+ @PathParam("entityId") String entityId,
+ @QueryParam("fields") String fields) {
+ init(res);
+ Entity entity = null;
+ try {
+ entity =
+ store.getEntity(parseStr(entityId), parseStr(entityType),
+ parseFieldsStr(fields, ","));
+ } catch (IllegalArgumentException e) {
+ throw new BadRequestException(
+ "requested invalid field.");
+ } catch (IOException e) {
+ LOG.error("Error getting entity", e);
+ throw new WebApplicationException(e,
+ Response.Status.INTERNAL_SERVER_ERROR);
+ }
+ if (entity == null) {
+ throw new WebApplicationException(Response.Status.NOT_FOUND);
+ }
+ return entity;
+ }
+
+ /**
+ * Return the events that match the given parameters.
+ */
+ @GET
+ @Path("/{entityType}/events")
+ @Produces({ MediaType.APPLICATION_JSON /* , MediaType.APPLICATION_XML */})
+ public Events getEvents(
+ @Context HttpServletRequest req,
+ @Context HttpServletResponse res,
+ @PathParam("entityType") String entityType,
+ @QueryParam("entityId") String entityId,
+ @QueryParam("eventType") String eventType,
+ @QueryParam("windowStart") String windowStart,
+ @QueryParam("windowEnd") String windowEnd,
+ @QueryParam("limit") String limit) {
+ init(res);
+ Events events = null;
+ try {
+ events = store.getEntityTimelines(
+ parseStr(entityType),
+ parseArrayStr(entityId, ","),
+ parseLongStr(limit),
+ parseLongStr(windowStart),
+ parseLongStr(windowEnd),
+ parseArrayStr(eventType, ","));
+ } catch (NumberFormatException e) {
+ throw new BadRequestException(
+ "windowStart, windowEnd or limit is not a numeric value.");
+ } catch (IOException e) {
+ LOG.error("Error getting entity timelines", e);
+ throw new WebApplicationException(e,
+ Response.Status.INTERNAL_SERVER_ERROR);
+ }
+ if (events == null) {
+ return new Events();
+ }
+ return events;
+ }
+
+ /**
+ * Store the given entities into the timeline store, and return the errors
+ * that happen during storing.
+ */
+ @POST
+ @Consumes({ MediaType.APPLICATION_JSON /* , MediaType.APPLICATION_XML */})
+ public PutErrors postEntities(
+ @Context HttpServletRequest req,
+ @Context HttpServletResponse res,
+ Entities entities) {
+ init(res);
+ if (entities == null) {
+ return new PutErrors();
+ }
+ try {
+ return store.put(entities);
+ } catch (IOException e) {
+ LOG.error("Error putting entities", e);
+ throw new WebApplicationException(e,
+ Response.Status.INTERNAL_SERVER_ERROR);
+ }
+ }
+
+ private void init(HttpServletResponse response) {
+ response.setContentType(null);
+ }
+
+ private static SortedSet<String> parseArrayStr(String str, String delimiter) {
+ if (str == null) {
+ return null;
+ }
+ SortedSet<String> strSet = new TreeSet<String>();
+ String[] strs = str.split(delimiter);
+ for (String aStr : strs) {
+ strSet.add(aStr.trim());
+ }
+ return strSet;
+ }
+
+ private static NameValuePair parsePairStr(String str, String delimiter) {
+ if (str == null) {
+ return null;
+ }
+ String[] strs = str.split(delimiter, 2);
+ return new NameValuePair(strs[0].trim(), strs[1].trim());
+ }
+
+ private static Collection<NameValuePair> parsePairsStr(
+ String str, String aDelimiter, String pDelimiter) {
+ if (str == null) {
+ return null;
+ }
+ String[] strs = str.split(aDelimiter);
+ Set<NameValuePair> pairs = new HashSet<NameValuePair>();
+ for (String aStr : strs) {
+ pairs.add(parsePairStr(aStr, pDelimiter));
+ }
+ return pairs;
+ }
+
+ private static EnumSet<Field> parseFieldsStr(String str, String delimiter) {
+ if (str == null) {
+ return null;
+ }
+ String[] strs = str.split(delimiter);
+ List<Field> fieldList = new ArrayList<Field>();
+ for (String s : strs) {
+ s = s.trim().toUpperCase();
+ if (s.equals("EVENTS"))
+ fieldList.add(Field.EVENTS);
+ else if (s.equals("LASTEVENTONLY"))
+ fieldList.add(Field.LAST_EVENT_ONLY);
+ else if (s.equals("RELATEDENTITIES"))
+ fieldList.add(Field.RELATED_ENTITIES);
+ else if (s.equals("PRIMARYFILTERS"))
+ fieldList.add(Field.PRIMARY_FILTERS);
+ else if (s.equals("OTHERINFO"))
+ fieldList.add(Field.OTHER_INFO);
+ }
+ if (fieldList.size() == 0)
+ return null;
+ Field f1 = fieldList.remove(fieldList.size() - 1);
+ if (fieldList.size() == 0)
+ return EnumSet.of(f1);
+ else
+ return EnumSet.of(f1, fieldList.toArray(new Field[fieldList.size()]));
+ }
+
+ private static Long parseLongStr(String str) {
+ return str == null ? null : Long.parseLong(str.trim());
+ }
+
+ private static String parseStr(String str) {
+ return str == null ? null : str.trim();
+ }
+
+}
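
For reference, a minimal client-side sketch of exercising these endpoints with plain HttpURLConnection. The host, port, and JSON field names are hypothetical; only the /ws/v1/timeline paths and query parameters come from the resource above.

    import java.io.BufferedReader;
    import java.io.InputStreamReader;
    import java.io.OutputStream;
    import java.net.HttpURLConnection;
    import java.net.URL;
    import java.nio.charset.StandardCharsets;

    public class TimelineRestClientSketch {
      public static void main(String[] args) throws Exception {
        // GET /ws/v1/timeline/{entityType} with the query parameters parsed by
        // getEntities(): primaryFilter is "name:value", fields is a comma list.
        URL getUrl = new URL("http://localhost:8188/ws/v1/timeline/type_1"
            + "?primaryFilter=user:someuser&limit=10&fields=EVENTS,PRIMARYFILTERS");
        HttpURLConnection get = (HttpURLConnection) getUrl.openConnection();
        get.setRequestProperty("Accept", "application/json");
        BufferedReader in = new BufferedReader(
            new InputStreamReader(get.getInputStream(), StandardCharsets.UTF_8));
        System.out.println(in.readLine());
        in.close();

        // POST /ws/v1/timeline with a JSON Entities body stores new entities.
        // The JSON property names below are illustrative; see the Entity
        // record for the actual serialized layout.
        String body =
            "{\"entities\":[{\"entity\":\"app_1\",\"entitytype\":\"type_1\"}]}";
        HttpURLConnection post = (HttpURLConnection)
            new URL("http://localhost:8188/ws/v1/timeline").openConnection();
        post.setDoOutput(true);
        post.setRequestMethod("POST");
        post.setRequestProperty("Content-Type", "application/json");
        OutputStream out = post.getOutputStream();
        out.write(body.getBytes(StandardCharsets.UTF_8));
        out.close();
        System.out.println("POST status: " + post.getResponseCode());
      }
    }
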
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/ApplicationTimelineStoreTestUtils.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/ApplicationTimelineStoreTestUtils.java
deleted file mode 100644
index 9afa5c0..0000000
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/ApplicationTimelineStoreTestUtils.java
+++ /dev/null
@@ -1,538 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.yarn.server.applicationhistoryservice.apptimeline;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertTrue;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.EnumSet;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.SortedSet;
-import java.util.TreeSet;
-
-import org.apache.hadoop.yarn.api.records.apptimeline.ATSEntities;
-import org.apache.hadoop.yarn.api.records.apptimeline.ATSEntity;
-import org.apache.hadoop.yarn.api.records.apptimeline.ATSEvent;
-import org.apache.hadoop.yarn.api.records.apptimeline.ATSEvents.ATSEventsOfOneEntity;
-import org.apache.hadoop.yarn.api.records.apptimeline.ATSPutErrors;
-import org.apache.hadoop.yarn.api.records.apptimeline.ATSPutErrors.ATSPutError;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.apptimeline.ApplicationTimelineReader.Field;
-
-public class ApplicationTimelineStoreTestUtils {
-
- private static final Map EMPTY_MAP = Collections.emptyMap();
- private static final Map> EMPTY_REL_ENTITIES =
- new HashMap>();
-
- protected ApplicationTimelineStore store;
- private String entity1;
- private String entityType1;
- private String entity1b;
- private String entity2;
- private String entityType2;
- private Map primaryFilters;
- private Map secondaryFilters;
- private Map allFilters;
- private Map otherInfo;
- private Map> relEntityMap;
- private NameValuePair userFilter;
- private Collection goodTestingFilters;
- private Collection badTestingFilters;
- private ATSEvent ev1;
- private ATSEvent ev2;
- private ATSEvent ev3;
- private ATSEvent ev4;
- private Map eventInfo;
- private List events1;
- private List events2;
-
- /**
- * Load test data into the given store
- */
- protected void loadTestData() throws IOException {
- ATSEntities atsEntities = new ATSEntities();
- Map primaryFilters = new HashMap();
- primaryFilters.put("user", "username");
- primaryFilters.put("appname", 12345l);
- Map secondaryFilters = new HashMap();
- secondaryFilters.put("startTime", 123456l);
- secondaryFilters.put("status", "RUNNING");
- Map otherInfo1 = new HashMap();
- otherInfo1.put("info1", "val1");
- otherInfo1.putAll(secondaryFilters);
-
- String entity1 = "id_1";
- String entityType1 = "type_1";
- String entity1b = "id_2";
- String entity2 = "id_2";
- String entityType2 = "type_2";
-
- Map> relatedEntities =
- new HashMap>();
- relatedEntities.put(entityType2, Collections.singletonList(entity2));
-
- ATSEvent ev3 = createEvent(789l, "launch_event", null);
- ATSEvent ev4 = createEvent(-123l, "init_event", null);
- List events = new ArrayList();
- events.add(ev3);
- events.add(ev4);
- atsEntities.setEntities(Collections.singletonList(createEntity(entity2,
- entityType2, null, events, null, null, null)));
- ATSPutErrors response = store.put(atsEntities);
- assertEquals(0, response.getErrors().size());
-
- ATSEvent ev1 = createEvent(123l, "start_event", null);
- atsEntities.setEntities(Collections.singletonList(createEntity(entity1,
- entityType1, 123l, Collections.singletonList(ev1),
- relatedEntities, primaryFilters, otherInfo1)));
- response = store.put(atsEntities);
- assertEquals(0, response.getErrors().size());
- atsEntities.setEntities(Collections.singletonList(createEntity(entity1b,
- entityType1, null, Collections.singletonList(ev1), relatedEntities,
- primaryFilters, otherInfo1)));
- response = store.put(atsEntities);
- assertEquals(0, response.getErrors().size());
-
- Map eventInfo = new HashMap();
- eventInfo.put("event info 1", "val1");
- ATSEvent ev2 = createEvent(456l, "end_event", eventInfo);
- Map otherInfo2 = new HashMap();
- otherInfo2.put("info2", "val2");
- atsEntities.setEntities(Collections.singletonList(createEntity(entity1,
- entityType1, null, Collections.singletonList(ev2), null,
- primaryFilters, otherInfo2)));
- response = store.put(atsEntities);
- assertEquals(0, response.getErrors().size());
- atsEntities.setEntities(Collections.singletonList(createEntity(entity1b,
- entityType1, 789l, Collections.singletonList(ev2), null,
- primaryFilters, otherInfo2)));
- response = store.put(atsEntities);
- assertEquals(0, response.getErrors().size());
-
- atsEntities.setEntities(Collections.singletonList(createEntity(
- "badentityid", "badentity", null, null, null, null, otherInfo1)));
- response = store.put(atsEntities);
- assertEquals(1, response.getErrors().size());
- ATSPutError error = response.getErrors().get(0);
- assertEquals("badentityid", error.getEntityId());
- assertEquals("badentity", error.getEntityType());
- assertEquals(ATSPutError.NO_START_TIME, error.getErrorCode());
- }
-
- /**
- * Load verification data
- */
- protected void loadVerificationData() throws Exception {
- userFilter = new NameValuePair("user",
- "username");
- goodTestingFilters = new ArrayList();
- goodTestingFilters.add(new NameValuePair("appname", 12345l));
- goodTestingFilters.add(new NameValuePair("status", "RUNNING"));
- badTestingFilters = new ArrayList();
- badTestingFilters.add(new NameValuePair("appname", 12345l));
- badTestingFilters.add(new NameValuePair("status", "FINISHED"));
-
- primaryFilters = new HashMap();
- primaryFilters.put("user", "username");
- primaryFilters.put("appname", 12345l);
- secondaryFilters = new HashMap();
- secondaryFilters.put("startTime", 123456l);
- secondaryFilters.put("status", "RUNNING");
- allFilters = new HashMap();
- allFilters.putAll(secondaryFilters);
- allFilters.putAll(primaryFilters);
- otherInfo = new HashMap();
- otherInfo.put("info1", "val1");
- otherInfo.put("info2", "val2");
- otherInfo.putAll(secondaryFilters);
-
- entity1 = "id_1";
- entityType1 = "type_1";
- entity1b = "id_2";
- entity2 = "id_2";
- entityType2 = "type_2";
-
- ev1 = createEvent(123l, "start_event", null);
-
- eventInfo = new HashMap();
- eventInfo.put("event info 1", "val1");
- ev2 = createEvent(456l, "end_event", eventInfo);
- events1 = new ArrayList();
- events1.add(ev2);
- events1.add(ev1);
-
- relEntityMap =
- new HashMap>();
- List ids = new ArrayList();
- ids.add(entity1);
- ids.add(entity1b);
- relEntityMap.put(entityType1, ids);
-
- ev3 = createEvent(789l, "launch_event", null);
- ev4 = createEvent(-123l, "init_event", null);
- events2 = new ArrayList();
- events2.add(ev3);
- events2.add(ev4);
- }
-
- public void testGetSingleEntity() throws IOException {
- // test getting entity info
- verifyEntityInfo(null, null, null, null, null, null,
- store.getEntity("id_1", "type_2", EnumSet.allOf(Field.class)));
-
- verifyEntityInfo(entity1, entityType1, events1, EMPTY_REL_ENTITIES,
- primaryFilters, otherInfo, store.getEntity(entity1, entityType1,
- EnumSet.allOf(Field.class)));
-
- verifyEntityInfo(entity1b, entityType1, events1, EMPTY_REL_ENTITIES,
- primaryFilters, otherInfo, store.getEntity(entity1b, entityType1,
- EnumSet.allOf(Field.class)));
-
- verifyEntityInfo(entity2, entityType2, events2, relEntityMap, EMPTY_MAP,
- EMPTY_MAP, store.getEntity(entity2, entityType2,
- EnumSet.allOf(Field.class)));
-
- // test getting single fields
- verifyEntityInfo(entity1, entityType1, events1, null, null, null,
- store.getEntity(entity1, entityType1, EnumSet.of(Field.EVENTS)));
-
- verifyEntityInfo(entity1, entityType1, Collections.singletonList(ev2),
- null, null, null, store.getEntity(entity1, entityType1,
- EnumSet.of(Field.LAST_EVENT_ONLY)));
-
- verifyEntityInfo(entity1b, entityType1, events1, EMPTY_REL_ENTITIES,
- primaryFilters, otherInfo, store.getEntity(entity1b, entityType1,
- null));
-
- verifyEntityInfo(entity1, entityType1, null, null, primaryFilters, null,
- store.getEntity(entity1, entityType1,
- EnumSet.of(Field.PRIMARY_FILTERS)));
-
- verifyEntityInfo(entity1, entityType1, null, null, null, otherInfo,
- store.getEntity(entity1, entityType1, EnumSet.of(Field.OTHER_INFO)));
-
- verifyEntityInfo(entity2, entityType2, null, relEntityMap, null, null,
- store.getEntity(entity2, entityType2,
- EnumSet.of(Field.RELATED_ENTITIES)));
- }
-
- public void testGetEntities() throws IOException {
- // test getting entities
- assertEquals("nonzero entities size for nonexistent type", 0,
- store.getEntities("type_0", null, null, null, null, null,
- null).getEntities().size());
- assertEquals("nonzero entities size for nonexistent type", 0,
- store.getEntities("type_3", null, null, null, null, null,
- null).getEntities().size());
- assertEquals("nonzero entities size for nonexistent type", 0,
- store.getEntities("type_0", null, null, null, userFilter,
- null, null).getEntities().size());
- assertEquals("nonzero entities size for nonexistent type", 0,
- store.getEntities("type_3", null, null, null, userFilter,
- null, null).getEntities().size());
-
- List entities =
- store.getEntities("type_1", null, null, null, null, null,
- EnumSet.allOf(Field.class)).getEntities();
- assertEquals(2, entities.size());
- verifyEntityInfo(entity1, entityType1, events1, EMPTY_REL_ENTITIES,
- primaryFilters, otherInfo, entities.get(0));
- verifyEntityInfo(entity1b, entityType1, events1, EMPTY_REL_ENTITIES,
- primaryFilters, otherInfo, entities.get(1));
-
- entities = store.getEntities("type_2", null, null, null, null, null,
- EnumSet.allOf(Field.class)).getEntities();
- assertEquals(1, entities.size());
- verifyEntityInfo(entity2, entityType2, events2, relEntityMap, EMPTY_MAP,
- EMPTY_MAP, entities.get(0));
-
- entities = store.getEntities("type_1", 1l, null, null, null, null,
- EnumSet.allOf(Field.class)).getEntities();
- assertEquals(1, entities.size());
- verifyEntityInfo(entity1, entityType1, events1, EMPTY_REL_ENTITIES,
- primaryFilters, otherInfo, entities.get(0));
-
- entities = store.getEntities("type_1", 1l, 0l, null, null, null,
- EnumSet.allOf(Field.class)).getEntities();
- assertEquals(1, entities.size());
- verifyEntityInfo(entity1, entityType1, events1, EMPTY_REL_ENTITIES,
- primaryFilters, otherInfo, entities.get(0));
-
- entities = store.getEntities("type_1", null, 234l, null, null, null,
- EnumSet.allOf(Field.class)).getEntities();
- assertEquals(0, entities.size());
-
- entities = store.getEntities("type_1", null, 123l, null, null, null,
- EnumSet.allOf(Field.class)).getEntities();
- assertEquals(0, entities.size());
-
- entities = store.getEntities("type_1", null, 234l, 345l, null, null,
- EnumSet.allOf(Field.class)).getEntities();
- assertEquals(0, entities.size());
-
- entities = store.getEntities("type_1", null, null, 345l, null, null,
- EnumSet.allOf(Field.class)).getEntities();
- assertEquals(2, entities.size());
- verifyEntityInfo(entity1, entityType1, events1, EMPTY_REL_ENTITIES,
- primaryFilters, otherInfo, entities.get(0));
- verifyEntityInfo(entity1b, entityType1, events1, EMPTY_REL_ENTITIES,
- primaryFilters, otherInfo, entities.get(1));
-
- entities = store.getEntities("type_1", null, null, 123l, null, null,
- EnumSet.allOf(Field.class)).getEntities();
- assertEquals(2, entities.size());
- verifyEntityInfo(entity1, entityType1, events1, EMPTY_REL_ENTITIES,
- primaryFilters, otherInfo, entities.get(0));
- verifyEntityInfo(entity1b, entityType1, events1, EMPTY_REL_ENTITIES,
- primaryFilters, otherInfo, entities.get(1));
- }
-
- public void testGetEntitiesWithPrimaryFilters() throws IOException {
- // test using primary filter
- assertEquals("nonzero entities size for primary filter", 0,
- store.getEntities("type_1", null, null, null,
- new NameValuePair("none", "none"), null,
- EnumSet.allOf(Field.class)).getEntities().size());
- assertEquals("nonzero entities size for primary filter", 0,
- store.getEntities("type_2", null, null, null,
- new NameValuePair("none", "none"), null,
- EnumSet.allOf(Field.class)).getEntities().size());
- assertEquals("nonzero entities size for primary filter", 0,
- store.getEntities("type_3", null, null, null,
- new NameValuePair("none", "none"), null,
- EnumSet.allOf(Field.class)).getEntities().size());
-
- List entities = store.getEntities("type_1", null, null, null,
- userFilter, null, EnumSet.allOf(Field.class)).getEntities();
- assertEquals(2, entities.size());
- verifyEntityInfo(entity1, entityType1, events1, EMPTY_REL_ENTITIES,
- primaryFilters, otherInfo, entities.get(0));
- verifyEntityInfo(entity1b, entityType1, events1, EMPTY_REL_ENTITIES,
- primaryFilters, otherInfo, entities.get(1));
-
- entities = store.getEntities("type_2", null, null, null, userFilter, null,
- EnumSet.allOf(Field.class)).getEntities();
- assertEquals(0, entities.size());
-
- entities = store.getEntities("type_1", 1l, null, null, userFilter, null,
- EnumSet.allOf(Field.class)).getEntities();
- assertEquals(1, entities.size());
- verifyEntityInfo(entity1, entityType1, events1, EMPTY_REL_ENTITIES,
- primaryFilters, otherInfo, entities.get(0));
-
- entities = store.getEntities("type_1", 1l, 0l, null, userFilter, null,
- EnumSet.allOf(Field.class)).getEntities();
- assertEquals(1, entities.size());
- verifyEntityInfo(entity1, entityType1, events1, EMPTY_REL_ENTITIES,
- primaryFilters, otherInfo, entities.get(0));
-
- entities = store.getEntities("type_1", null, 234l, null, userFilter, null,
- EnumSet.allOf(Field.class)).getEntities();
- assertEquals(0, entities.size());
-
- entities = store.getEntities("type_1", null, 234l, 345l, userFilter, null,
- EnumSet.allOf(Field.class)).getEntities();
- assertEquals(0, entities.size());
-
- entities = store.getEntities("type_1", null, null, 345l, userFilter, null,
- EnumSet.allOf(Field.class)).getEntities();
- assertEquals(2, entities.size());
- verifyEntityInfo(entity1, entityType1, events1, EMPTY_REL_ENTITIES,
- primaryFilters, otherInfo, entities.get(0));
- verifyEntityInfo(entity1b, entityType1, events1, EMPTY_REL_ENTITIES,
- primaryFilters, otherInfo, entities.get(1));
- }
-
- public void testGetEntitiesWithSecondaryFilters() throws IOException {
- // test using secondary filter
- List entities = store.getEntities("type_1", null, null, null,
- null, goodTestingFilters, EnumSet.allOf(Field.class)).getEntities();
- assertEquals(2, entities.size());
- verifyEntityInfo(entity1, entityType1, events1, EMPTY_REL_ENTITIES,
- primaryFilters, otherInfo, entities.get(0));
- verifyEntityInfo(entity1b, entityType1, events1, EMPTY_REL_ENTITIES,
- primaryFilters, otherInfo, entities.get(1));
-
- entities = store.getEntities("type_1", null, null, null, userFilter,
- goodTestingFilters, EnumSet.allOf(Field.class)).getEntities();
- assertEquals(2, entities.size());
- verifyEntityInfo(entity1, entityType1, events1, EMPTY_REL_ENTITIES,
- primaryFilters, otherInfo, entities.get(0));
- verifyEntityInfo(entity1b, entityType1, events1, EMPTY_REL_ENTITIES,
- primaryFilters, otherInfo, entities.get(1));
-
- entities = store.getEntities("type_1", null, null, null, null,
- badTestingFilters, EnumSet.allOf(Field.class)).getEntities();
- assertEquals(0, entities.size());
-
- entities = store.getEntities("type_1", null, null, null, userFilter,
- badTestingFilters, EnumSet.allOf(Field.class)).getEntities();
- assertEquals(0, entities.size());
- }
-
- public void testGetEvents() throws IOException {
- // test getting entity timelines
- SortedSet sortedSet = new TreeSet();
- sortedSet.add(entity1);
- List timelines =
- store.getEntityTimelines(entityType1, sortedSet, null, null,
- null, null).getAllEvents();
- assertEquals(1, timelines.size());
- verifyEntityTimeline(timelines.get(0), entity1, entityType1, ev2, ev1);
-
- sortedSet.add(entity1b);
- timelines = store.getEntityTimelines(entityType1, sortedSet, null,
- null, null, null).getAllEvents();
- assertEquals(2, timelines.size());
- verifyEntityTimeline(timelines.get(0), entity1, entityType1, ev2, ev1);
- verifyEntityTimeline(timelines.get(1), entity1b, entityType1, ev2, ev1);
-
- timelines = store.getEntityTimelines(entityType1, sortedSet, 1l,
- null, null, null).getAllEvents();
- assertEquals(2, timelines.size());
- verifyEntityTimeline(timelines.get(0), entity1, entityType1, ev2);
- verifyEntityTimeline(timelines.get(1), entity1b, entityType1, ev2);
-
- timelines = store.getEntityTimelines(entityType1, sortedSet, null,
- 345l, null, null).getAllEvents();
- assertEquals(2, timelines.size());
- verifyEntityTimeline(timelines.get(0), entity1, entityType1, ev2);
- verifyEntityTimeline(timelines.get(1), entity1b, entityType1, ev2);
-
- timelines = store.getEntityTimelines(entityType1, sortedSet, null,
- 123l, null, null).getAllEvents();
- assertEquals(2, timelines.size());
- verifyEntityTimeline(timelines.get(0), entity1, entityType1, ev2);
- verifyEntityTimeline(timelines.get(1), entity1b, entityType1, ev2);
-
- timelines = store.getEntityTimelines(entityType1, sortedSet, null,
- null, 345l, null).getAllEvents();
- assertEquals(2, timelines.size());
- verifyEntityTimeline(timelines.get(0), entity1, entityType1, ev1);
- verifyEntityTimeline(timelines.get(1), entity1b, entityType1, ev1);
-
- timelines = store.getEntityTimelines(entityType1, sortedSet, null,
- null, 123l, null).getAllEvents();
- assertEquals(2, timelines.size());
- verifyEntityTimeline(timelines.get(0), entity1, entityType1, ev1);
- verifyEntityTimeline(timelines.get(1), entity1b, entityType1, ev1);
-
- timelines = store.getEntityTimelines(entityType1, sortedSet, null,
- null, null, Collections.singleton("end_event")).getAllEvents();
- assertEquals(2, timelines.size());
- verifyEntityTimeline(timelines.get(0), entity1, entityType1, ev2);
- verifyEntityTimeline(timelines.get(1), entity1b, entityType1, ev2);
-
- sortedSet.add(entity2);
- timelines = store.getEntityTimelines(entityType2, sortedSet, null,
- null, null, null).getAllEvents();
- assertEquals(1, timelines.size());
- verifyEntityTimeline(timelines.get(0), entity2, entityType2, ev3, ev4);
- }
-
- /**
- * Verify a single entity
- */
- private static void verifyEntityInfo(String entity, String entityType,
- List events, Map> relatedEntities,
- Map primaryFilters, Map otherInfo,
- ATSEntity retrievedEntityInfo) {
- if (entity == null) {
- assertNull(retrievedEntityInfo);
- return;
- }
- assertEquals(entity, retrievedEntityInfo.getEntityId());
- assertEquals(entityType, retrievedEntityInfo.getEntityType());
- if (events == null)
- assertNull(retrievedEntityInfo.getEvents());
- else
- assertEquals(events, retrievedEntityInfo.getEvents());
- if (relatedEntities == null)
- assertNull(retrievedEntityInfo.getRelatedEntities());
- else
- assertEquals(relatedEntities, retrievedEntityInfo.getRelatedEntities());
- if (primaryFilters == null)
- assertNull(retrievedEntityInfo.getPrimaryFilters());
- else
- assertTrue(primaryFilters.equals(
- retrievedEntityInfo.getPrimaryFilters()));
- if (otherInfo == null)
- assertNull(retrievedEntityInfo.getOtherInfo());
- else
- assertTrue(otherInfo.equals(retrievedEntityInfo.getOtherInfo()));
- }
-
- /**
- * Verify timeline events
- */
- private static void verifyEntityTimeline(
- ATSEventsOfOneEntity retrievedEvents, String entity, String entityType,
- ATSEvent... actualEvents) {
- assertEquals(entity, retrievedEvents.getEntityId());
- assertEquals(entityType, retrievedEvents.getEntityType());
- assertEquals(actualEvents.length, retrievedEvents.getEvents().size());
- for (int i = 0; i < actualEvents.length; i++) {
- assertEquals(actualEvents[i], retrievedEvents.getEvents().get(i));
- }
- }
-
- /**
- * Create a test entity
- */
- private static ATSEntity createEntity(String entity, String entityType,
- Long startTime, List events,
- Map> relatedEntities,
- Map primaryFilters, Map otherInfo) {
- ATSEntity atsEntity = new ATSEntity();
- atsEntity.setEntityId(entity);
- atsEntity.setEntityType(entityType);
- atsEntity.setStartTime(startTime);
- atsEntity.setEvents(events);
- if (relatedEntities != null)
- for (Entry> e : relatedEntities.entrySet())
- for (String v : e.getValue())
- atsEntity.addRelatedEntity(e.getKey(), v);
- else
- atsEntity.setRelatedEntities(null);
- atsEntity.setPrimaryFilters(primaryFilters);
- atsEntity.setOtherInfo(otherInfo);
- return atsEntity;
- }
-
- /**
- * Create a test event
- */
- private static ATSEvent createEvent(long timestamp, String type, Map info) {
- ATSEvent event = new ATSEvent();
- event.setTimestamp(timestamp);
- event.setEventType(type);
- event.setEventInfo(info);
- return event;
- }
-
-}
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/TestGenericObjectMapper.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/TestGenericObjectMapper.java
deleted file mode 100644
index 4bb453a..0000000
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/TestGenericObjectMapper.java
+++ /dev/null
@@ -1,89 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.yarn.server.applicationhistoryservice.apptimeline;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.io.WritableComparator;
-import org.junit.Test;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import static org.junit.Assert.assertEquals;
-
-@InterfaceAudience.Private
-@InterfaceStability.Unstable
-public class TestGenericObjectMapper {
-
- @Test
- public void testEncoding() {
- testEncoding(Long.MAX_VALUE);
- testEncoding(Long.MIN_VALUE);
- testEncoding(0l);
- testEncoding(128l);
- testEncoding(256l);
- testEncoding(512l);
- testEncoding(-256l);
- }
-
- private static void testEncoding(long l) {
- byte[] b = GenericObjectMapper.writeReverseOrderedLong(l);
- assertEquals("error decoding", l,
- GenericObjectMapper.readReverseOrderedLong(b, 0));
- byte[] buf = new byte[16];
- System.arraycopy(b, 0, buf, 5, 8);
- assertEquals("error decoding at offset", l,
- GenericObjectMapper.readReverseOrderedLong(buf, 5));
- if (l > Long.MIN_VALUE) {
- byte[] a = GenericObjectMapper.writeReverseOrderedLong(l-1);
- assertEquals("error preserving ordering", 1,
- WritableComparator.compareBytes(a, 0, a.length, b, 0, b.length));
- }
- if (l < Long.MAX_VALUE) {
- byte[] c = GenericObjectMapper.writeReverseOrderedLong(l+1);
- assertEquals("error preserving ordering", 1,
- WritableComparator.compareBytes(b, 0, b.length, c, 0, c.length));
- }
- }
-
- private static void verify(Object o) throws IOException {
- assertEquals(o, GenericObjectMapper.read(GenericObjectMapper.write(o)));
- }
-
- @Test
- public void testValueTypes() throws IOException {
- verify(42l);
- verify(42);
- verify(1.23);
- verify("abc");
- verify(true);
- List list = new ArrayList();
- list.add("123");
- list.add("abc");
- verify(list);
- Map map = new HashMap();
- map.put("k1","v1");
- map.put("k2","v2");
- verify(map);
- }
-
-}
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/TestLeveldbApplicationTimelineStore.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/TestLeveldbApplicationTimelineStore.java
deleted file mode 100644
index b868049..0000000
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/TestLeveldbApplicationTimelineStore.java
+++ /dev/null
@@ -1,95 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.yarn.server.applicationhistoryservice.apptimeline;
-
-import java.io.File;
-import java.io.IOException;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileContext;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.yarn.api.records.apptimeline.ATSEntities;
-import org.apache.hadoop.yarn.api.records.apptimeline.ATSEntity;
-import org.apache.hadoop.yarn.api.records.apptimeline.ATSPutErrors;
-import org.apache.hadoop.yarn.api.records.apptimeline.ATSPutErrors.ATSPutError;
-import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-
-import static org.junit.Assert.assertEquals;
-
-@InterfaceAudience.Private
-@InterfaceStability.Unstable
-public class TestLeveldbApplicationTimelineStore
- extends ApplicationTimelineStoreTestUtils {
- private FileContext fsContext;
- private File fsPath;
-
- @Before
- public void setup() throws Exception {
- fsContext = FileContext.getLocalFSFileContext();
- Configuration conf = new Configuration();
- fsPath = new File("target", this.getClass().getSimpleName() +
- "-tmpDir").getAbsoluteFile();
- fsContext.delete(new Path(fsPath.getAbsolutePath()), true);
- conf.set(YarnConfiguration.ATS_LEVELDB_PATH_PROPERTY,
- fsPath.getAbsolutePath());
- store = new LeveldbApplicationTimelineStore();
- store.init(conf);
- store.start();
- loadTestData();
- loadVerificationData();
- }
-
- @After
- public void tearDown() throws Exception {
- store.stop();
- fsContext.delete(new Path(fsPath.getAbsolutePath()), true);
- }
-
- @Test
- public void testGetSingleEntity() throws IOException {
- super.testGetSingleEntity();
- ((LeveldbApplicationTimelineStore)store).clearStartTimeCache();
- super.testGetSingleEntity();
- }
-
- @Test
- public void testGetEntities() throws IOException {
- super.testGetEntities();
- }
-
- @Test
- public void testGetEntitiesWithPrimaryFilters() throws IOException {
- super.testGetEntitiesWithPrimaryFilters();
- }
-
- @Test
- public void testGetEntitiesWithSecondaryFilters() throws IOException {
- super.testGetEntitiesWithSecondaryFilters();
- }
-
- @Test
- public void testGetEvents() throws IOException {
- super.testGetEvents();
- }
-
-}
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/TestMemoryApplicationTimelineStore.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/TestMemoryApplicationTimelineStore.java
deleted file mode 100644
index 07a3955..0000000
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/TestMemoryApplicationTimelineStore.java
+++ /dev/null
@@ -1,74 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.server.applicationhistoryservice.apptimeline;
-
-import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-
-import java.io.IOException;
-
-public class TestMemoryApplicationTimelineStore
- extends ApplicationTimelineStoreTestUtils {
-
- @Before
- public void setup() throws Exception {
- store = new MemoryApplicationTimelineStore();
- store.init(new YarnConfiguration());
- store.start();
- loadTestData();
- loadVerificationData();
- }
-
- @After
- public void tearDown() throws Exception {
- store.stop();
- }
-
- public ApplicationTimelineStore getApplicationTimelineStore() {
- return store;
- }
-
- @Test
- public void testGetSingleEntity() throws IOException {
- super.testGetSingleEntity();
- }
-
- @Test
- public void testGetEntities() throws IOException {
- super.testGetEntities();
- }
-
- @Test
- public void testGetEntitiesWithPrimaryFilters() throws IOException {
- super.testGetEntitiesWithPrimaryFilters();
- }
-
- @Test
- public void testGetEntitiesWithSecondaryFilters() throws IOException {
- super.testGetEntitiesWithSecondaryFilters();
- }
-
- @Test
- public void testGetEvents() throws IOException {
- super.testGetEvents();
- }
-
-}
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TestGenericObjectMapper.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TestGenericObjectMapper.java
new file mode 100644
index 0000000..676972b
--- /dev/null
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TestGenericObjectMapper.java
@@ -0,0 +1,90 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.io.WritableComparator;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.GenericObjectMapper;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import static org.junit.Assert.assertEquals;
+
+@InterfaceAudience.Private
+@InterfaceStability.Unstable
+public class TestGenericObjectMapper {
+
+ @Test
+ public void testEncoding() {
+ testEncoding(Long.MAX_VALUE);
+ testEncoding(Long.MIN_VALUE);
+ testEncoding(0l);
+ testEncoding(128l);
+ testEncoding(256l);
+ testEncoding(512l);
+ testEncoding(-256l);
+ }
+
+ private static void testEncoding(long l) {
+ byte[] b = GenericObjectMapper.writeReverseOrderedLong(l);
+ assertEquals("error decoding", l,
+ GenericObjectMapper.readReverseOrderedLong(b, 0));
+ byte[] buf = new byte[16];
+ System.arraycopy(b, 0, buf, 5, 8);
+ assertEquals("error decoding at offset", l,
+ GenericObjectMapper.readReverseOrderedLong(buf, 5));
+ if (l > Long.MIN_VALUE) {
+ byte[] a = GenericObjectMapper.writeReverseOrderedLong(l-1);
+ assertEquals("error preserving ordering", 1,
+ WritableComparator.compareBytes(a, 0, a.length, b, 0, b.length));
+ }
+ if (l < Long.MAX_VALUE) {
+ byte[] c = GenericObjectMapper.writeReverseOrderedLong(l+1);
+ assertEquals("error preserving ordering", 1,
+ WritableComparator.compareBytes(b, 0, b.length, c, 0, c.length));
+ }
+ }
+
+ private static void verify(Object o) throws IOException {
+ assertEquals(o, GenericObjectMapper.read(GenericObjectMapper.write(o)));
+ }
+
+ @Test
+ public void testValueTypes() throws IOException {
+ verify(42l);
+ verify(42);
+ verify(1.23);
+ verify("abc");
+ verify(true);
+ List<Object> list = new ArrayList<Object>();
+ list.add("123");
+ list.add("abc");
+ verify(list);
+ Map<Object, Object> map = new HashMap<Object, Object>();
+ map.put("k1","v1");
+ map.put("k2","v2");
+ verify(map);
+ }
+
+}
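testEncoding above asserts that writeReverseOrderedLong produces byte arrays whose unsigned lexicographic order is the reverse of the numeric order of the longs. The sketch below shows one way such an encoding can be built (complement the value and flip the sign bit, written big-endian); it is offered only to illustrate the ordering property being tested and is not the GenericObjectMapper implementation.

    import java.nio.ByteBuffer;

    // Illustrative sketch: larger longs map to lexicographically smaller byte arrays
    // under unsigned comparison, the property checked with WritableComparator.compareBytes.
    public class ReverseOrderSketch {
      static byte[] encode(long l) {
        // ~l ^ Long.MIN_VALUE == ~(l ^ Long.MIN_VALUE): order-flip of the sign-adjusted value.
        return ByteBuffer.allocate(8).putLong(~l ^ Long.MIN_VALUE).array();
      }

      static int compareUnsigned(byte[] a, byte[] b) {
        for (int i = 0; i < a.length; i++) {
          int x = a[i] & 0xff;
          int y = b[i] & 0xff;
          if (x != y) {
            return x < y ? -1 : 1;
          }
        }
        return 0;
      }

      public static void main(String[] args) {
        // Smaller longs encode to lexicographically larger byte arrays.
        System.out.println(compareUnsigned(encode(5L), encode(6L)) > 0);   // true
        System.out.println(compareUnsigned(encode(-1L), encode(0L)) > 0);  // true
      }
    }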
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TestLeveldbTimelineStore.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TestLeveldbTimelineStore.java
new file mode 100644
index 0000000..a77f163
--- /dev/null
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TestLeveldbTimelineStore.java
@@ -0,0 +1,89 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline;
+
+import java.io.File;
+import java.io.IOException;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileContext;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+@InterfaceAudience.Private
+@InterfaceStability.Unstable
+public class TestLeveldbTimelineStore
+ extends TimelineStoreTestUtils {
+ private FileContext fsContext;
+ private File fsPath;
+
+ @Before
+ public void setup() throws Exception {
+ fsContext = FileContext.getLocalFSFileContext();
+ Configuration conf = new Configuration();
+ fsPath = new File("target", this.getClass().getSimpleName() +
+ "-tmpDir").getAbsoluteFile();
+ fsContext.delete(new Path(fsPath.getAbsolutePath()), true);
+ conf.set(YarnConfiguration.TIMELINE_LEVELDB_PATH_PROPERTY,
+ fsPath.getAbsolutePath());
+ store = new LeveldbTimelineStore();
+ store.init(conf);
+ store.start();
+ loadTestData();
+ loadVerificationData();
+ }
+
+ @After
+ public void tearDown() throws Exception {
+ store.stop();
+ fsContext.delete(new Path(fsPath.getAbsolutePath()), true);
+ }
+
+ @Test
+ public void testGetSingleEntity() throws IOException {
+ super.testGetSingleEntity();
+ ((LeveldbTimelineStore)store).clearStartTimeCache();
+ super.testGetSingleEntity();
+ }
+
+ @Test
+ public void testGetEntities() throws IOException {
+ super.testGetEntities();
+ }
+
+ @Test
+ public void testGetEntitiesWithPrimaryFilters() throws IOException {
+ super.testGetEntitiesWithPrimaryFilters();
+ }
+
+ @Test
+ public void testGetEntitiesWithSecondaryFilters() throws IOException {
+ super.testGetEntitiesWithSecondaryFilters();
+ }
+
+ @Test
+ public void testGetEvents() throws IOException {
+ super.testGetEvents();
+ }
+
+}
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TestMemoryTimelineStore.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TestMemoryTimelineStore.java
new file mode 100644
index 0000000..49ab53f
--- /dev/null
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TestMemoryTimelineStore.java
@@ -0,0 +1,76 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline;
+
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TimelineStore;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.MemoryTimelineStore;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.io.IOException;
+
+public class TestMemoryTimelineStore
+ extends TimelineStoreTestUtils {
+
+ @Before
+ public void setup() throws Exception {
+ store = new MemoryTimelineStore();
+ store.init(new YarnConfiguration());
+ store.start();
+ loadTestData();
+ loadVerificationData();
+ }
+
+ @After
+ public void tearDown() throws Exception {
+ store.stop();
+ }
+
+ public TimelineStore getTimelineStore() {
+ return store;
+ }
+
+ @Test
+ public void testGetSingleEntity() throws IOException {
+ super.testGetSingleEntity();
+ }
+
+ @Test
+ public void testGetEntities() throws IOException {
+ super.testGetEntities();
+ }
+
+ @Test
+ public void testGetEntitiesWithPrimaryFilters() throws IOException {
+ super.testGetEntitiesWithPrimaryFilters();
+ }
+
+ @Test
+ public void testGetEntitiesWithSecondaryFilters() throws IOException {
+ super.testGetEntitiesWithSecondaryFilters();
+ }
+
+ @Test
+ public void testGetEvents() throws IOException {
+ super.testGetEvents();
+ }
+
+}
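The memory-backed and leveldb-backed test classes above both drive the same TimelineStore interface that TimelineStoreTestUtils exercises: init and start the store, put entities, read them back, stop. A minimal sketch of that lifecycle follows; the entity id, type, and start time are illustrative values, and the Entity setter names are assumed to match those used elsewhere in this patch.

    import java.io.IOException;
    import java.util.Collections;
    import java.util.EnumSet;

    import org.apache.hadoop.yarn.api.records.timeline.Entities;
    import org.apache.hadoop.yarn.api.records.timeline.Entity;
    import org.apache.hadoop.yarn.conf.YarnConfiguration;
    import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.MemoryTimelineStore;
    import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TimelineReader.Field;

    // Sketch only: stores one entity in the in-memory store and reads it back.
    public class TimelineStoreSketch {
      public static void main(String[] args) throws IOException {
        MemoryTimelineStore store = new MemoryTimelineStore();
        store.init(new YarnConfiguration());
        store.start();

        Entity entity = new Entity();
        entity.setEntityId("app_1");
        entity.setEntityType("APP");
        entity.setStartTime(1l);
        Entities entities = new Entities();
        entities.setEntities(Collections.singletonList(entity));
        store.put(entities);                      // returns PutErrors; empty on success

        Entity read = store.getEntity("app_1", "APP", EnumSet.allOf(Field.class));
        System.out.println(read.getEntityId());   // app_1
        store.stop();
      }
    }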
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TimelineStoreTestUtils.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TimelineStoreTestUtils.java
new file mode 100644
index 0000000..3555331
--- /dev/null
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TimelineStoreTestUtils.java
@@ -0,0 +1,565 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+import java.util.SortedSet;
+import java.util.TreeSet;
+
+import org.apache.hadoop.yarn.api.records.timeline.Entities;
+import org.apache.hadoop.yarn.api.records.timeline.Entity;
+import org.apache.hadoop.yarn.api.records.timeline.Event;
+import org.apache.hadoop.yarn.api.records.timeline.PutErrors;
+import org.apache.hadoop.yarn.api.records.timeline.Events.EventsOfOneEntity;
+import org.apache.hadoop.yarn.api.records.timeline.PutErrors.PutError;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TimelineStore;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.NameValuePair;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TimelineReader.Field;
+
+public class TimelineStoreTestUtils {
+
+ protected static final Map<String, Object> EMPTY_MAP =
+ Collections.emptyMap();
+ protected static final Map<String, Set<Object>> EMPTY_PRIMARY_FILTERS =
+ Collections.emptyMap();
+ protected static final Map<String, Set<String>> EMPTY_REL_ENTITIES =
+ Collections.emptyMap();
+
+ protected TimelineStore store;
+ protected String entityId1;
+ protected String entityType1;
+ protected String entityId1b;
+ protected String entityId2;
+ protected String entityType2;
+ protected Map<String, Set<Object>> primaryFilters;
+ protected Map<String, Object> secondaryFilters;
+ protected Map<String, Object> allFilters;
+ protected Map<String, Object> otherInfo;
+ protected Map<String, Set<String>> relEntityMap;
+ protected NameValuePair userFilter;
+ protected Collection<NameValuePair> goodTestingFilters;
+ protected Collection<NameValuePair> badTestingFilters;
+ protected Event ev1;
+ protected Event ev2;
+ protected Event ev3;
+ protected Event ev4;
+ protected Map<String, Object> eventInfo;
+ protected List<Event> events1;
+ protected List<Event> events2;
+
+ /**
+ * Load test data into the given store
+ */
+ protected void loadTestData() throws IOException {
+ Entities entities = new Entities();
+ Map<String, Set<Object>> primaryFilters =
+ new HashMap<String, Set<Object>>();
+ Set<Object> l1 = new HashSet<Object>();
+ l1.add("username");
+ Set<Object> l2 = new HashSet<Object>();
+ l2.add(12345l);
+ primaryFilters.put("user", l1);
+ primaryFilters.put("appname", l2);
+ Map<String, Object> secondaryFilters = new HashMap<String, Object>();
+ secondaryFilters.put("startTime", 123456l);
+ secondaryFilters.put("status", "RUNNING");
+ Map<String, Object> otherInfo1 = new HashMap<String, Object>();
+ otherInfo1.put("info1", "val1");
+ otherInfo1.putAll(secondaryFilters);
+
+ String entityId1 = "id_1";
+ String entityType1 = "type_1";
+ String entityId1b = "id_2";
+ String entityId2 = "id_2";
+ String entityType2 = "type_2";
+
+ Map<String, Set<String>> relatedEntities =
+ new HashMap<String, Set<String>>();
+ relatedEntities.put(entityType2, Collections.singleton(entityId2));
+
+ Event ev3 = createEvent(789l, "launch_event", null);
+ Event ev4 = createEvent(-123l, "init_event", null);
+ List<Event> events = new ArrayList<Event>();
+ events.add(ev3);
+ events.add(ev4);
+ entities.setEntities(Collections.singletonList(createEntity(entityId2,
+ entityType2, null, events, null, null, null)));
+ PutErrors response = store.put(entities);
+ assertEquals(0, response.getErrors().size());
+
+ Event ev1 = createEvent(123l, "start_event", null);
+ entities.setEntities(Collections.singletonList(createEntity(entityId1,
+ entityType1, 123l, Collections.singletonList(ev1),
+ relatedEntities, primaryFilters, otherInfo1)));
+ response = store.put(entities);
+ assertEquals(0, response.getErrors().size());
+ entities.setEntities(Collections.singletonList(createEntity(entityId1b,
+ entityType1, null, Collections.singletonList(ev1), relatedEntities,
+ primaryFilters, otherInfo1)));
+ response = store.put(entities);
+ assertEquals(0, response.getErrors().size());
+
+ Map<String, Object> eventInfo = new HashMap<String, Object>();
+ eventInfo.put("event info 1", "val1");
+ Event ev2 = createEvent(456l, "end_event", eventInfo);
+ Map<String, Object> otherInfo2 = new HashMap<String, Object>();
+ otherInfo2.put("info2", "val2");
+ entities.setEntities(Collections.singletonList(createEntity(entityId1,
+ entityType1, null, Collections.singletonList(ev2), null,
+ primaryFilters, otherInfo2)));
+ response = store.put(entities);
+ assertEquals(0, response.getErrors().size());
+ entities.setEntities(Collections.singletonList(createEntity(entityId1b,
+ entityType1, 789l, Collections.singletonList(ev2), null,
+ primaryFilters, otherInfo2)));
+ response = store.put(entities);
+ assertEquals(0, response.getErrors().size());
+
+ entities.setEntities(Collections.singletonList(createEntity(
+ "badentityid", "badentity", null, null, null, null, otherInfo1)));
+ response = store.put(entities);
+ assertEquals(1, response.getErrors().size());
+ PutError error = response.getErrors().get(0);
+ assertEquals("badentityid", error.getEntityId());
+ assertEquals("badentity", error.getEntityType());
+ assertEquals(PutError.NO_START_TIME, error.getErrorCode());
+ }
+
+ /**
+ * Load verification data
+ */
+ protected void loadVerificationData() throws Exception {
+ userFilter = new NameValuePair("user",
+ "username");
+ goodTestingFilters = new ArrayList<NameValuePair>();
+ goodTestingFilters.add(new NameValuePair("appname", 12345l));
+ goodTestingFilters.add(new NameValuePair("status", "RUNNING"));
+ badTestingFilters = new ArrayList<NameValuePair>();
+ badTestingFilters.add(new NameValuePair("appname", 12345l));
+ badTestingFilters.add(new NameValuePair("status", "FINISHED"));
+
+ primaryFilters = new HashMap<String, Set<Object>>();
+ Set<Object> l1 = new HashSet<Object>();
+ l1.add("username");
+ Set<Object> l2 = new HashSet<Object>();
+ l2.add(12345l);
+ primaryFilters.put("user", l1);
+ primaryFilters.put("appname", l2);
+ secondaryFilters = new HashMap<String, Object>();
+ secondaryFilters.put("startTime", 123456l);
+ secondaryFilters.put("status", "RUNNING");
+ allFilters = new HashMap<String, Object>();
+ allFilters.putAll(secondaryFilters);
+ for (Entry<String, Set<Object>> pf : primaryFilters.entrySet()) {
+ for (Object o : pf.getValue()) {
+ allFilters.put(pf.getKey(), o);
+ }
+ }
+ otherInfo = new HashMap<String, Object>();
+ otherInfo.put("info1", "val1");
+ otherInfo.put("info2", "val2");
+ otherInfo.putAll(secondaryFilters);
+
+ entityId1 = "id_1";
+ entityType1 = "type_1";
+ entityId1b = "id_2";
+ entityId2 = "id_2";
+ entityType2 = "type_2";
+
+ ev1 = createEvent(123l, "start_event", null);
+
+ eventInfo = new HashMap<String, Object>();
+ eventInfo.put("event info 1", "val1");
+ ev2 = createEvent(456l, "end_event", eventInfo);
+ events1 = new ArrayList<Event>();
+ events1.add(ev2);
+ events1.add(ev1);
+
+ relEntityMap =
+ new HashMap<String, Set<String>>();
+ Set<String> ids = new HashSet<String>();
+ ids.add(entityId1);
+ ids.add(entityId1b);
+ relEntityMap.put(entityType1, ids);
+
+ ev3 = createEvent(789l, "launch_event", null);
+ ev4 = createEvent(-123l, "init_event", null);
+ events2 = new ArrayList<Event>();
+ events2.add(ev3);
+ events2.add(ev4);
+ }
+
+ public void testGetSingleEntity() throws IOException {
+ // test getting entity info
+ verifyEntityInfo(null, null, null, null, null, null,
+ store.getEntity("id_1", "type_2", EnumSet.allOf(Field.class)));
+
+ verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
+ primaryFilters, otherInfo, store.getEntity(entityId1, entityType1,
+ EnumSet.allOf(Field.class)));
+
+ verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
+ primaryFilters, otherInfo, store.getEntity(entityId1b, entityType1,
+ EnumSet.allOf(Field.class)));
+
+ verifyEntityInfo(entityId2, entityType2, events2, relEntityMap,
+ EMPTY_PRIMARY_FILTERS, EMPTY_MAP, store.getEntity(entityId2, entityType2,
+ EnumSet.allOf(Field.class)));
+
+ // test getting single fields
+ verifyEntityInfo(entityId1, entityType1, events1, null, null, null,
+ store.getEntity(entityId1, entityType1, EnumSet.of(Field.EVENTS)));
+
+ verifyEntityInfo(entityId1, entityType1, Collections.singletonList(ev2),
+ null, null, null, store.getEntity(entityId1, entityType1,
+ EnumSet.of(Field.LAST_EVENT_ONLY)));
+
+ verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
+ primaryFilters, otherInfo, store.getEntity(entityId1b, entityType1,
+ null));
+
+ verifyEntityInfo(entityId1, entityType1, null, null, primaryFilters, null,
+ store.getEntity(entityId1, entityType1,
+ EnumSet.of(Field.PRIMARY_FILTERS)));
+
+ verifyEntityInfo(entityId1, entityType1, null, null, null, otherInfo,
+ store.getEntity(entityId1, entityType1, EnumSet.of(Field.OTHER_INFO)));
+
+ verifyEntityInfo(entityId2, entityType2, null, relEntityMap, null, null,
+ store.getEntity(entityId2, entityType2,
+ EnumSet.of(Field.RELATED_ENTITIES)));
+ }
+
+ public void testGetEntities() throws IOException {
+ // test getting entities
+ assertEquals("nonzero entities size for nonexistent type", 0,
+ store.getEntities("type_0", null, null, null, null, null,
+ null).getEntities().size());
+ assertEquals("nonzero entities size for nonexistent type", 0,
+ store.getEntities("type_3", null, null, null, null, null,
+ null).getEntities().size());
+ assertEquals("nonzero entities size for nonexistent type", 0,
+ store.getEntities("type_0", null, null, null, userFilter,
+ null, null).getEntities().size());
+ assertEquals("nonzero entities size for nonexistent type", 0,
+ store.getEntities("type_3", null, null, null, userFilter,
+ null, null).getEntities().size());
+
+ List<Entity> entities =
+ store.getEntities("type_1", null, null, null, null, null,
+ EnumSet.allOf(Field.class)).getEntities();
+ assertEquals(2, entities.size());
+ verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
+ primaryFilters, otherInfo, entities.get(0));
+ verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
+ primaryFilters, otherInfo, entities.get(1));
+
+ entities = store.getEntities("type_2", null, null, null, null, null,
+ EnumSet.allOf(Field.class)).getEntities();
+ assertEquals(1, entities.size());
+ verifyEntityInfo(entityId2, entityType2, events2, relEntityMap,
+ EMPTY_PRIMARY_FILTERS, EMPTY_MAP, entities.get(0));
+
+ entities = store.getEntities("type_1", 1l, null, null, null, null,
+ EnumSet.allOf(Field.class)).getEntities();
+ assertEquals(1, entities.size());
+ verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
+ primaryFilters, otherInfo, entities.get(0));
+
+ entities = store.getEntities("type_1", 1l, 0l, null, null, null,
+ EnumSet.allOf(Field.class)).getEntities();
+ assertEquals(1, entities.size());
+ verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
+ primaryFilters, otherInfo, entities.get(0));
+
+ entities = store.getEntities("type_1", null, 234l, null, null, null,
+ EnumSet.allOf(Field.class)).getEntities();
+ assertEquals(0, entities.size());
+
+ entities = store.getEntities("type_1", null, 123l, null, null, null,
+ EnumSet.allOf(Field.class)).getEntities();
+ assertEquals(0, entities.size());
+
+ entities = store.getEntities("type_1", null, 234l, 345l, null, null,
+ EnumSet.allOf(Field.class)).getEntities();
+ assertEquals(0, entities.size());
+
+ entities = store.getEntities("type_1", null, null, 345l, null, null,
+ EnumSet.allOf(Field.class)).getEntities();
+ assertEquals(2, entities.size());
+ verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
+ primaryFilters, otherInfo, entities.get(0));
+ verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
+ primaryFilters, otherInfo, entities.get(1));
+
+ entities = store.getEntities("type_1", null, null, 123l, null, null,
+ EnumSet.allOf(Field.class)).getEntities();
+ assertEquals(2, entities.size());
+ verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
+ primaryFilters, otherInfo, entities.get(0));
+ verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
+ primaryFilters, otherInfo, entities.get(1));
+ }
+
+ public void testGetEntitiesWithPrimaryFilters() throws IOException {
+ // test using primary filter
+ assertEquals("nonzero entities size for primary filter", 0,
+ store.getEntities("type_1", null, null, null,
+ new NameValuePair("none", "none"), null,
+ EnumSet.allOf(Field.class)).getEntities().size());
+ assertEquals("nonzero entities size for primary filter", 0,
+ store.getEntities("type_2", null, null, null,
+ new NameValuePair("none", "none"), null,
+ EnumSet.allOf(Field.class)).getEntities().size());
+ assertEquals("nonzero entities size for primary filter", 0,
+ store.getEntities("type_3", null, null, null,
+ new NameValuePair("none", "none"), null,
+ EnumSet.allOf(Field.class)).getEntities().size());
+
+ List<Entity> entities = store.getEntities("type_1", null, null, null,
+ userFilter, null, EnumSet.allOf(Field.class)).getEntities();
+ assertEquals(2, entities.size());
+ verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
+ primaryFilters, otherInfo, entities.get(0));
+ verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
+ primaryFilters, otherInfo, entities.get(1));
+
+ entities = store.getEntities("type_2", null, null, null, userFilter, null,
+ EnumSet.allOf(Field.class)).getEntities();
+ assertEquals(0, entities.size());
+
+ entities = store.getEntities("type_1", 1l, null, null, userFilter, null,
+ EnumSet.allOf(Field.class)).getEntities();
+ assertEquals(1, entities.size());
+ verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
+ primaryFilters, otherInfo, entities.get(0));
+
+ entities = store.getEntities("type_1", 1l, 0l, null, userFilter, null,
+ EnumSet.allOf(Field.class)).getEntities();
+ assertEquals(1, entities.size());
+ verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
+ primaryFilters, otherInfo, entities.get(0));
+
+ entities = store.getEntities("type_1", null, 234l, null, userFilter, null,
+ EnumSet.allOf(Field.class)).getEntities();
+ assertEquals(0, entities.size());
+
+ entities = store.getEntities("type_1", null, 234l, 345l, userFilter, null,
+ EnumSet.allOf(Field.class)).getEntities();
+ assertEquals(0, entities.size());
+
+ entities = store.getEntities("type_1", null, null, 345l, userFilter, null,
+ EnumSet.allOf(Field.class)).getEntities();
+ assertEquals(2, entities.size());
+ verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
+ primaryFilters, otherInfo, entities.get(0));
+ verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
+ primaryFilters, otherInfo, entities.get(1));
+ }
+
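+ // Reviewer note (assumption): goodTestingFilters and badTestingFilters are
+ // presumably populated in the shared setup of this utility class; the
+ // checks below assume secondary filters are matched against values stored
+ // on the entity, so the "bad" set matches nothing even when combined with
+ // a matching primary filter.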
+ public void testGetEntitiesWithSecondaryFilters() throws IOException {
+ // test using secondary filter
+ List<Entity> entities = store.getEntities("type_1", null, null, null,
+ null, goodTestingFilters, EnumSet.allOf(Field.class)).getEntities();
+ assertEquals(2, entities.size());
+ verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
+ primaryFilters, otherInfo, entities.get(0));
+ verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
+ primaryFilters, otherInfo, entities.get(1));
+
+ entities = store.getEntities("type_1", null, null, null, userFilter,
+ goodTestingFilters, EnumSet.allOf(Field.class)).getEntities();
+ assertEquals(2, entities.size());
+ verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
+ primaryFilters, otherInfo, entities.get(0));
+ verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
+ primaryFilters, otherInfo, entities.get(1));
+
+ entities = store.getEntities("type_1", null, null, null, null,
+ badTestingFilters, EnumSet.allOf(Field.class)).getEntities();
+ assertEquals(0, entities.size());
+
+ entities = store.getEntities("type_1", null, null, null, userFilter,
+ badTestingFilters, EnumSet.allOf(Field.class)).getEntities();
+ assertEquals(0, entities.size());
+ }
+
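+ // Reviewer note (assumption): getEntityTimelines appears to take
+ // (entityType, entityIds, limit, windowStart, windowEnd, eventTypes);
+ // unlike getEntities, the time window here seems to apply to individual
+ // event timestamps, which is why the windowed calls below return only one
+ // of the two events per entity.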
+ public void testGetEvents() throws IOException {
+ // test getting entity timelines
+ SortedSet<String> sortedSet = new TreeSet<String>();
+ sortedSet.add(entityId1);
+ List<EventsOfOneEntity> timelines =
+ store.getEntityTimelines(entityType1, sortedSet, null, null,
+ null, null).getAllEvents();
+ assertEquals(1, timelines.size());
+ verifyEntityTimeline(timelines.get(0), entityId1, entityType1, ev2, ev1);
+
+ sortedSet.add(entityId1b);
+ timelines = store.getEntityTimelines(entityType1, sortedSet, null,
+ null, null, null).getAllEvents();
+ assertEquals(2, timelines.size());
+ verifyEntityTimeline(timelines.get(0), entityId1, entityType1, ev2, ev1);
+ verifyEntityTimeline(timelines.get(1), entityId1b, entityType1, ev2, ev1);
+
+ timelines = store.getEntityTimelines(entityType1, sortedSet, 1l,
+ null, null, null).getAllEvents();
+ assertEquals(2, timelines.size());
+ verifyEntityTimeline(timelines.get(0), entityId1, entityType1, ev2);
+ verifyEntityTimeline(timelines.get(1), entityId1b, entityType1, ev2);
+
+ timelines = store.getEntityTimelines(entityType1, sortedSet, null,
+ 345l, null, null).getAllEvents();
+ assertEquals(2, timelines.size());
+ verifyEntityTimeline(timelines.get(0), entityId1, entityType1, ev2);
+ verifyEntityTimeline(timelines.get(1), entityId1b, entityType1, ev2);
+
+ timelines = store.getEntityTimelines(entityType1, sortedSet, null,
+ 123l, null, null).getAllEvents();
+ assertEquals(2, timelines.size());
+ verifyEntityTimeline(timelines.get(0), entityId1, entityType1, ev2);
+ verifyEntityTimeline(timelines.get(1), entityId1b, entityType1, ev2);
+
+ timelines = store.getEntityTimelines(entityType1, sortedSet, null,
+ null, 345l, null).getAllEvents();
+ assertEquals(2, timelines.size());
+ verifyEntityTimeline(timelines.get(0), entityId1, entityType1, ev1);
+ verifyEntityTimeline(timelines.get(1), entityId1b, entityType1, ev1);
+
+ timelines = store.getEntityTimelines(entityType1, sortedSet, null,
+ null, 123l, null).getAllEvents();
+ assertEquals(2, timelines.size());
+ verifyEntityTimeline(timelines.get(0), entityId1, entityType1, ev1);
+ verifyEntityTimeline(timelines.get(1), entityId1b, entityType1, ev1);
+
+ timelines = store.getEntityTimelines(entityType1, sortedSet, null,
+ null, null, Collections.singleton("end_event")).getAllEvents();
+ assertEquals(2, timelines.size());
+ verifyEntityTimeline(timelines.get(0), entityId1, entityType1, ev2);
+ verifyEntityTimeline(timelines.get(1), entityId1b, entityType1, ev2);
+
+ sortedSet.add(entityId2);
+ timelines = store.getEntityTimelines(entityType2, sortedSet, null,
+ null, null, null).getAllEvents();
+ assertEquals(1, timelines.size());
+ verifyEntityTimeline(timelines.get(0), entityId2, entityType2, ev3, ev4);
+ }
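+
+ // Reviewer note (assumption): the public test* methods above carry no
+ // @Test annotation, so concrete store tests presumably subclass this
+ // utility class and delegate from annotated methods, roughly:
+ //
+ //   @Test
+ //   public void testGetEvents() throws IOException {
+ //     super.testGetEvents();
+ //   }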
+
+ /**
+ * Verify a single entity. A null entityId asserts that the retrieved
+ * entity itself is null; any other null expected argument asserts that
+ * the corresponding field of the retrieved entity is null.
+ */
+ protected static void verifyEntityInfo(String entityId, String entityType,
+ List<Event> events, Map<String, Set<String>> relatedEntities,
+ Map<String, Set<Object>> primaryFilters, Map<String, Object> otherInfo,
+ Entity retrievedEntityInfo) {
+ if (entityId == null) {
+ assertNull(retrievedEntityInfo);
+ return;
+ }
+ assertEquals(entityId, retrievedEntityInfo.getEntityId());
+ assertEquals(entityType, retrievedEntityInfo.getEntityType());
+ if (events == null) {
+ assertNull(retrievedEntityInfo.getEvents());
+ } else {
+ assertEquals(events, retrievedEntityInfo.getEvents());
+ }
+ if (relatedEntities == null) {
+ assertNull(retrievedEntityInfo.getRelatedEntities());
+ } else {
+ assertEquals(relatedEntities, retrievedEntityInfo.getRelatedEntities());
+ }
+ if (primaryFilters == null) {
+ assertNull(retrievedEntityInfo.getPrimaryFilters());
+ } else {
+ assertTrue(primaryFilters.equals(
+ retrievedEntityInfo.getPrimaryFilters()));
+ }
+ if (otherInfo == null) {
+ assertNull(retrievedEntityInfo.getOtherInfo());
+ } else {
+ assertTrue(otherInfo.equals(retrievedEntityInfo.getOtherInfo()));
+ }
+ }
+
+ /**
+ * Verify the retrieved timeline of one entity: the event count and each
+ * event, in order, must match the expected events.
+ */
+ private static void verifyEntityTimeline(
+ EventsOfOneEntity retrievedEvents, String entityId, String entityType,
+ Event... actualEvents) {
+ assertEquals(entityId, retrievedEvents.getEntityId());
+ assertEquals(entityType, retrievedEvents.getEntityType());
+ assertEquals(actualEvents.length, retrievedEvents.getEvents().size());
+ for (int i = 0; i < actualEvents.length; i++) {
+ assertEquals(actualEvents[i], retrievedEvents.getEvents().get(i));
+ }
+ }
+
+ /**
+ * Create a test entity
+ */
+ protected static Entity createEntity(String entityId, String entityType,
+ Long startTime, List<Event> events,
+ Map<String, Set<String>> relatedEntities,
+ Map<String, Set<Object>> primaryFilters,
+ Map