diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/apptimeline/ATSEntities.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/apptimeline/ATSEntities.java deleted file mode 100644 index ed02cac..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/apptimeline/ATSEntities.java +++ /dev/null @@ -1,88 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.yarn.api.records.apptimeline; - -import java.util.ArrayList; -import java.util.List; - -import javax.xml.bind.annotation.XmlAccessType; -import javax.xml.bind.annotation.XmlAccessorType; -import javax.xml.bind.annotation.XmlElement; -import javax.xml.bind.annotation.XmlRootElement; - -import org.apache.hadoop.classification.InterfaceAudience.Public; -import org.apache.hadoop.classification.InterfaceStability.Unstable; - -/** - * The class that hosts a list of application timeline entities. - */ -@XmlRootElement(name = "entities") -@XmlAccessorType(XmlAccessType.NONE) -@Public -@Unstable -public class ATSEntities { - - private List entities = - new ArrayList(); - - public ATSEntities() { - - } - - /** - * Get a list of entities - * - * @return a list of entities - */ - @XmlElement(name = "entities") - public List getEntities() { - return entities; - } - - /** - * Add a single entity into the existing entity list - * - * @param entity - * a single entity - */ - public void addEntity(ATSEntity entity) { - entities.add(entity); - } - - /** - * All a list of entities into the existing entity list - * - * @param entities - * a list of entities - */ - public void addEntities(List entities) { - this.entities.addAll(entities); - } - - /** - * Set the entity list to the given list of entities - * - * @param entities - * a list of entities - */ - public void setEntities(List entities) { - this.entities = entities; - } - -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/apptimeline/ATSEntity.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/apptimeline/ATSEntity.java deleted file mode 100644 index e7907d8..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/apptimeline/ATSEntity.java +++ /dev/null @@ -1,418 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.yarn.api.records.apptimeline; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Set; - -import javax.xml.bind.annotation.XmlAccessType; -import javax.xml.bind.annotation.XmlAccessorType; -import javax.xml.bind.annotation.XmlElement; -import javax.xml.bind.annotation.XmlRootElement; - -import org.apache.hadoop.classification.InterfaceAudience.Public; -import org.apache.hadoop.classification.InterfaceStability.Unstable; - -/** - *
<p>
- * The class that contains the meta information of some conceptual entity of - * an application and its related events. The entity can be an application, an - * application attempt, a container or any other user-defined object. - *
</p>
- * - *
<p>
- * Primary filters will be used to index the entities in - * <code>ApplicationTimelineStore</code>, such that users should carefully - * choose the information they want to store as the primary filters. The - * remaining can be stored as other information. - *
</p>
- */ -@XmlRootElement(name = "entity") -@XmlAccessorType(XmlAccessType.NONE) -@Public -@Unstable -public class ATSEntity implements Comparable { - - private String entityType; - private String entityId; - private Long startTime; - private List events = new ArrayList(); - private Map> relatedEntities = - new HashMap>(); - private Map> primaryFilters = - new HashMap>(); - private Map otherInfo = - new HashMap(); - - public ATSEntity() { - - } - - /** - * Get the entity type - * - * @return the entity type - */ - @XmlElement(name = "entitytype") - public String getEntityType() { - return entityType; - } - - /** - * Set the entity type - * - * @param entityType - * the entity type - */ - public void setEntityType(String entityType) { - this.entityType = entityType; - } - - /** - * Get the entity Id - * - * @return the entity Id - */ - @XmlElement(name = "entity") - public String getEntityId() { - return entityId; - } - - /** - * Set the entity Id - * - * @param entityId - * the entity Id - */ - public void setEntityId(String entityId) { - this.entityId = entityId; - } - - /** - * Get the start time of the entity - * - * @return the start time of the entity - */ - @XmlElement(name = "starttime") - public Long getStartTime() { - return startTime; - } - - /** - * Set the start time of the entity - * - * @param startTime - * the start time of the entity - */ - public void setStartTime(Long startTime) { - this.startTime = startTime; - } - - /** - * Get a list of events related to the entity - * - * @return a list of events related to the entity - */ - @XmlElement(name = "events") - public List getEvents() { - return events; - } - - /** - * Add a single event related to the entity to the existing event list - * - * @param event - * a single event related to the entity - */ - public void addEvent(ATSEvent event) { - events.add(event); - } - - /** - * Add a list of events related to the entity to the existing event list - * - * @param events - * a list of events related to the entity - */ - public void addEvents(List events) { - this.events.addAll(events); - } - - /** - * Set the event list to the given list of events related to the entity - * - * @param events - * events a list of events related to the entity - */ - public void setEvents(List events) { - this.events = events; - } - - /** - * Get the related entities - * - * @return the related entities - */ - @XmlElement(name = "relatedentities") - public Map> getRelatedEntities() { - return relatedEntities; - } - - /** - * Add an entity to the existing related entity map - * - * @param entityType - * the entity type - * @param entityId - * the entity Id - */ - public void addRelatedEntity(String entityType, String entityId) { - Set thisRelatedEntity = relatedEntities.get(entityType); - if (thisRelatedEntity == null) { - thisRelatedEntity = new HashSet(); - relatedEntities.put(entityType, thisRelatedEntity); - } - thisRelatedEntity.add(entityId); - } - - /** - * Add a map of related entities to the existing related entity map - * - * @param relatedEntities - * a map of related entities - */ - public void addRelatedEntities(Map> relatedEntities) { - for (Entry> relatedEntity : - relatedEntities.entrySet()) { - Set thisRelatedEntity = - this.relatedEntities.get(relatedEntity.getKey()); - if (thisRelatedEntity == null) { - this.relatedEntities.put( - relatedEntity.getKey(), relatedEntity.getValue()); - } else { - thisRelatedEntity.addAll(relatedEntity.getValue()); - } - } - } - - /** - * Set the related entity map to the given map of related entities - 
* - * @param relatedEntities - * a map of related entities - */ - public void setRelatedEntities( - Map> relatedEntities) { - this.relatedEntities = relatedEntities; - } - - /** - * Get the primary filters - * - * @return the primary filters - */ - @XmlElement(name = "primaryfilters") - public Map> getPrimaryFilters() { - return primaryFilters; - } - - /** - * Add a single piece of primary filter to the existing primary filter map - * - * @param key - * the primary filter key - * @param value - * the primary filter value - */ - public void addPrimaryFilter(String key, Object value) { - Set thisPrimaryFilter = primaryFilters.get(key); - if (thisPrimaryFilter == null) { - thisPrimaryFilter = new HashSet(); - primaryFilters.put(key, thisPrimaryFilter); - } - thisPrimaryFilter.add(value); - } - - /** - * Add a map of primary filters to the existing primary filter map - * - * @param primaryFilters - * a map of primary filters - */ - public void addPrimaryFilters(Map> primaryFilters) { - for (Entry> primaryFilter : - primaryFilters.entrySet()) { - Set thisPrimaryFilter = - this.primaryFilters.get(primaryFilter.getKey()); - if (thisPrimaryFilter == null) { - this.primaryFilters.put( - primaryFilter.getKey(), primaryFilter.getValue()); - } else { - thisPrimaryFilter.addAll(primaryFilter.getValue()); - } - } - } - - /** - * Set the primary filter map to the given map of primary filters - * - * @param primaryFilters - * a map of primary filters - */ - public void setPrimaryFilters(Map> primaryFilters) { - this.primaryFilters = primaryFilters; - } - - /** - * Get the other information of the entity - * - * @return the other information of the entity - */ - @XmlElement(name = "otherinfo") - public Map getOtherInfo() { - return otherInfo; - } - - /** - * Add one piece of other information of the entity to the existing other info - * map - * - * @param key - * the other information key - * @param value - * the other information value - */ - public void addOtherInfo(String key, Object value) { - this.otherInfo.put(key, value); - } - - /** - * Add a map of other information of the entity to the existing other info map - * - * @param otherInfo - * a map of other information - */ - public void addOtherInfo(Map otherInfo) { - this.otherInfo.putAll(otherInfo); - } - - /** - * Set the other info map to the given map of other information - * - * @param otherInfo - * a map of other information - */ - public void setOtherInfo(Map otherInfo) { - this.otherInfo = otherInfo; - } - - @Override - public int hashCode() { - // generated by eclipse - final int prime = 31; - int result = 1; - result = prime * result + ((entityId == null) ? 0 : entityId.hashCode()); - result = - prime * result + ((entityType == null) ? 0 : entityType.hashCode()); - result = prime * result + ((events == null) ? 0 : events.hashCode()); - result = prime * result + ((otherInfo == null) ? 0 : otherInfo.hashCode()); - result = - prime * result - + ((primaryFilters == null) ? 0 : primaryFilters.hashCode()); - result = - prime * result - + ((relatedEntities == null) ? 0 : relatedEntities.hashCode()); - result = prime * result + ((startTime == null) ? 
0 : startTime.hashCode()); - return result; - } - - @Override - public boolean equals(Object obj) { - // generated by eclipse - if (this == obj) - return true; - if (obj == null) - return false; - if (getClass() != obj.getClass()) - return false; - ATSEntity other = (ATSEntity) obj; - if (entityId == null) { - if (other.entityId != null) - return false; - } else if (!entityId.equals(other.entityId)) - return false; - if (entityType == null) { - if (other.entityType != null) - return false; - } else if (!entityType.equals(other.entityType)) - return false; - if (events == null) { - if (other.events != null) - return false; - } else if (!events.equals(other.events)) - return false; - if (otherInfo == null) { - if (other.otherInfo != null) - return false; - } else if (!otherInfo.equals(other.otherInfo)) - return false; - if (primaryFilters == null) { - if (other.primaryFilters != null) - return false; - } else if (!primaryFilters.equals(other.primaryFilters)) - return false; - if (relatedEntities == null) { - if (other.relatedEntities != null) - return false; - } else if (!relatedEntities.equals(other.relatedEntities)) - return false; - if (startTime == null) { - if (other.startTime != null) - return false; - } else if (!startTime.equals(other.startTime)) - return false; - return true; - } - - @Override - public int compareTo(ATSEntity other) { - int comparison = entityType.compareTo(other.entityType); - if (comparison == 0) { - long thisStartTime = - startTime == null ? Long.MIN_VALUE : startTime; - long otherStartTime = - other.startTime == null ? Long.MIN_VALUE : other.startTime; - if (thisStartTime > otherStartTime) { - return -1; - } else if (thisStartTime < otherStartTime) { - return 1; - } else { - return entityId.compareTo(other.entityId); - } - } else { - return comparison; - } - } - -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/apptimeline/ATSEvent.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/apptimeline/ATSEvent.java deleted file mode 100644 index 27bac16..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/apptimeline/ATSEvent.java +++ /dev/null @@ -1,172 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.hadoop.yarn.api.records.apptimeline; - -import java.util.HashMap; -import java.util.Map; - -import javax.xml.bind.annotation.XmlAccessType; -import javax.xml.bind.annotation.XmlAccessorType; -import javax.xml.bind.annotation.XmlElement; -import javax.xml.bind.annotation.XmlRootElement; - -import org.apache.hadoop.classification.InterfaceAudience.Public; -import org.apache.hadoop.classification.InterfaceStability.Unstable; - -/** - * The class that contains the information of an event that is related to some - * conceptual entity of an application. Users are free to define what the event - * means, such as starting an application, getting allocated a container and - * etc. - */ -@XmlRootElement(name = "event") -@XmlAccessorType(XmlAccessType.NONE) -@Public -@Unstable -public class ATSEvent implements Comparable { - - private long timestamp; - private String eventType; - private Map eventInfo = new HashMap(); - - public ATSEvent() { - } - - /** - * Get the timestamp of the event - * - * @return the timestamp of the event - */ - @XmlElement(name = "timestamp") - public long getTimestamp() { - return timestamp; - } - - /** - * Set the timestamp of the event - * - * @param timestamp - * the timestamp of the event - */ - public void setTimestamp(long timestamp) { - this.timestamp = timestamp; - } - - /** - * Get the event type - * - * @return the event type - */ - @XmlElement(name = "eventtype") - public String getEventType() { - return eventType; - } - - /** - * Set the event type - * - * @param eventType - * the event type - */ - public void setEventType(String eventType) { - this.eventType = eventType; - } - - /** - * Set the information of the event - * - * @return the information of the event - */ - @XmlElement(name = "eventinfo") - public Map getEventInfo() { - return eventInfo; - } - - /** - * Add one piece of the information of the event to the existing information - * map - * - * @param key - * the information key - * @param value - * the information value - */ - public void addEventInfo(String key, Object value) { - this.eventInfo.put(key, value); - } - - /** - * Add a map of the information of the event to the existing information map - * - * @param eventInfo - * a map of of the information of the event - */ - public void addEventInfo(Map eventInfo) { - this.eventInfo.putAll(eventInfo); - } - - /** - * Set the information map to the given map of the information of the event - * - * @param eventInfo - * a map of of the information of the event - */ - public void setEventInfo(Map eventInfo) { - this.eventInfo = eventInfo; - } - - @Override - public int compareTo(ATSEvent other) { - if (timestamp > other.timestamp) { - return -1; - } else if (timestamp < other.timestamp) { - return 1; - } else { - return eventType.compareTo(other.eventType); - } - } - - @Override - public boolean equals(Object o) { - if (this == o) - return true; - if (o == null || getClass() != o.getClass()) - return false; - - ATSEvent atsEvent = (ATSEvent) o; - - if (timestamp != atsEvent.timestamp) - return false; - if (!eventType.equals(atsEvent.eventType)) - return false; - if (eventInfo != null ? !eventInfo.equals(atsEvent.eventInfo) : - atsEvent.eventInfo != null) - return false; - - return true; - } - - @Override - public int hashCode() { - int result = (int) (timestamp ^ (timestamp >>> 32)); - result = 31 * result + eventType.hashCode(); - result = 31 * result + (eventInfo != null ? 
eventInfo.hashCode() : 0); - return result; - } -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/apptimeline/ATSEvents.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/apptimeline/ATSEvents.java deleted file mode 100644 index a08537d..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/apptimeline/ATSEvents.java +++ /dev/null @@ -1,189 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.yarn.api.records.apptimeline; - -import java.util.ArrayList; -import java.util.List; - -import javax.xml.bind.annotation.XmlAccessType; -import javax.xml.bind.annotation.XmlAccessorType; -import javax.xml.bind.annotation.XmlElement; -import javax.xml.bind.annotation.XmlRootElement; - -import org.apache.hadoop.classification.InterfaceAudience.Public; -import org.apache.hadoop.classification.InterfaceStability.Unstable; - -/** - * The class that hosts a list of events, which are categorized according to - * their related entities. - */ -@XmlRootElement(name = "events") -@XmlAccessorType(XmlAccessType.NONE) -@Public -@Unstable -public class ATSEvents { - - private List allEvents = - new ArrayList(); - - public ATSEvents() { - - } - - /** - * Get a list of {@link ATSEventsOfOneEntity} instances - * - * @return a list of {@link ATSEventsOfOneEntity} instances - */ - @XmlElement(name = "events") - public List getAllEvents() { - return allEvents; - } - - /** - * Add a single {@link ATSEventsOfOneEntity} instance into the existing list - * - * @param eventsOfOneEntity - * a single {@link ATSEventsOfOneEntity} instance - */ - public void addEvent(ATSEventsOfOneEntity eventsOfOneEntity) { - allEvents.add(eventsOfOneEntity); - } - - /** - * Add a list of {@link ATSEventsOfOneEntity} instances into the existing list - * - * @param allEvents - * a list of {@link ATSEventsOfOneEntity} instances - */ - public void addEvents(List allEvents) { - this.allEvents.addAll(allEvents); - } - - /** - * Set the list to the given list of {@link ATSEventsOfOneEntity} instances - * - * @param allEvents - * a list of {@link ATSEventsOfOneEntity} instances - */ - public void setEvents(List allEvents) { - this.allEvents.clear(); - this.allEvents.addAll(allEvents); - } - - /** - * The class that hosts a list of events that are only related to one entity. 
- */ - @XmlRootElement(name = "events") - @XmlAccessorType(XmlAccessType.NONE) - @Public - @Unstable - public static class ATSEventsOfOneEntity { - - private String entityId; - private String entityType; - private List events = new ArrayList(); - - public ATSEventsOfOneEntity() { - - } - - /** - * Get the entity Id - * - * @return the entity Id - */ - @XmlElement(name = "entity") - public String getEntityId() { - return entityId; - } - - /** - * Set the entity Id - * - * @param entityId - * the entity Id - */ - public void setEntityId(String entityId) { - this.entityId = entityId; - } - - /** - * Get the entity type - * - * @return the entity type - */ - @XmlElement(name = "entitytype") - public String getEntityType() { - return entityType; - } - - /** - * Set the entity type - * - * @param entityType - * the entity type - */ - public void setEntityType(String entityType) { - this.entityType = entityType; - } - - /** - * Get a list of events - * - * @return a list of events - */ - @XmlElement(name = "events") - public List getEvents() { - return events; - } - - /** - * Add a single event to the existing event list - * - * @param event - * a single event - */ - public void addEvent(ATSEvent event) { - events.add(event); - } - - /** - * Add a list of event to the existing event list - * - * @param events - * a list of events - */ - public void addEvents(List events) { - this.events.addAll(events); - } - - /** - * Set the event list to the given list of events - * - * @param events - * a list of events - */ - public void setEvents(List events) { - this.events = events; - } - - } - -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/apptimeline/ATSPutErrors.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/apptimeline/ATSPutErrors.java deleted file mode 100644 index d330eb4..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/apptimeline/ATSPutErrors.java +++ /dev/null @@ -1,175 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.yarn.api.records.apptimeline; - -import org.apache.hadoop.classification.InterfaceAudience.Public; -import org.apache.hadoop.classification.InterfaceStability.Unstable; - -import javax.xml.bind.annotation.XmlAccessType; -import javax.xml.bind.annotation.XmlAccessorType; -import javax.xml.bind.annotation.XmlElement; -import javax.xml.bind.annotation.XmlRootElement; -import java.util.ArrayList; -import java.util.List; - -/** - * A class that holds a list of put errors. This is the response returned - * when a list of {@link ATSEntity} objects is added to the application - * timeline. 
If there are errors in storing individual entity objects, - * they will be indicated in the list of errors. - */ -@XmlRootElement(name = "errors") -@XmlAccessorType(XmlAccessType.NONE) -@Public -@Unstable -public class ATSPutErrors { - - private List errors = new ArrayList(); - - public ATSPutErrors() { - - } - - /** - * Get a list of {@link ATSPutError} instances - * - * @return a list of {@link ATSPutError} instances - */ - @XmlElement(name = "errors") - public List getErrors() { - return errors; - } - - /** - * Add a single {@link ATSPutError} instance into the existing list - * - * @param error - * a single {@link ATSPutError} instance - */ - public void addError(ATSPutError error) { - errors.add(error); - } - - /** - * Add a list of {@link ATSPutError} instances into the existing list - * - * @param errors - * a list of {@link ATSPutError} instances - */ - public void addErrors(List errors) { - this.errors.addAll(errors); - } - - /** - * Set the list to the given list of {@link ATSPutError} instances - * - * @param errors - * a list of {@link ATSPutError} instances - */ - public void setErrors(List errors) { - this.errors.clear(); - this.errors.addAll(errors); - } - - /** - * A class that holds the error code for one entity. - */ - @XmlRootElement(name = "error") - @XmlAccessorType(XmlAccessType.NONE) - @Public - @Unstable - public static class ATSPutError { - /** - * Error code returned when no start time can be found when putting an - * entity. This occurs when the entity does not already exist in the - * store and it is put with no start time or events specified. - */ - public static final int NO_START_TIME = 1; - /** - * Error code returned if an IOException is encountered when putting an - * entity. - */ - public static final int IO_EXCEPTION = 2; - - private String entityId; - private String entityType; - private int errorCode; - - /** - * Get the entity Id - * - * @return the entity Id - */ - @XmlElement(name = "entity") - public String getEntityId() { - return entityId; - } - - /** - * Set the entity Id - * - * @param entityId - * the entity Id - */ - public void setEntityId(String entityId) { - this.entityId = entityId; - } - - /** - * Get the entity type - * - * @return the entity type - */ - @XmlElement(name = "entitytype") - public String getEntityType() { - return entityType; - } - - /** - * Set the entity type - * - * @param entityType - * the entity type - */ - public void setEntityType(String entityType) { - this.entityType = entityType; - } - - /** - * Get the error code - * - * @return an error code - */ - @XmlElement(name = "errorcode") - public int getErrorCode() { - return errorCode; - } - - /** - * Set the error code to the given error code - * - * @param errorCode - * an error code - */ - public void setErrorCode(int errorCode) { - this.errorCode = errorCode; - } - - } - -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/apptimeline/package-info.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/apptimeline/package-info.java deleted file mode 100644 index b57cad4..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/apptimeline/package-info.java +++ /dev/null @@ -1,21 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -@InterfaceAudience.Public -package org.apache.hadoop.yarn.api.records.apptimeline; -import org.apache.hadoop.classification.InterfaceAudience; - diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timeline/TimelineEntities.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timeline/TimelineEntities.java new file mode 100644 index 0000000..4a5c8f8 --- /dev/null +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timeline/TimelineEntities.java @@ -0,0 +1,88 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.yarn.api.records.timeline; + +import java.util.ArrayList; +import java.util.List; + +import javax.xml.bind.annotation.XmlAccessType; +import javax.xml.bind.annotation.XmlAccessorType; +import javax.xml.bind.annotation.XmlElement; +import javax.xml.bind.annotation.XmlRootElement; + +import org.apache.hadoop.classification.InterfaceAudience.Public; +import org.apache.hadoop.classification.InterfaceStability.Unstable; + +/** + * The class that hosts a list of timeline entities. 
+ */ +@XmlRootElement(name = "entities") +@XmlAccessorType(XmlAccessType.NONE) +@Public +@Unstable +public class TimelineEntities { + + private List<TimelineEntity> entities = + new ArrayList<TimelineEntity>(); + + public TimelineEntities() { + + } + + /** + * Get a list of entities + * + * @return a list of entities + */ + @XmlElement(name = "entities") + public List<TimelineEntity> getEntities() { + return entities; + } + + /** + * Add a single entity into the existing entity list + * + * @param entity + * a single entity + */ + public void addEntity(TimelineEntity entity) { + entities.add(entity); + } + + /** + * Add a list of entities into the existing entity list + * + * @param entities + * a list of entities + */ + public void addEntities(List<TimelineEntity> entities) { + this.entities.addAll(entities); + } + + /** + * Set the entity list to the given list of entities + * + * @param entities + * a list of entities + */ + public void setEntities(List<TimelineEntity> entities) { + this.entities = entities; + } + +} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timeline/TimelineEntity.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timeline/TimelineEntity.java new file mode 100644 index 0000000..20304bd --- /dev/null +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timeline/TimelineEntity.java @@ -0,0 +1,416 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.yarn.api.records.timeline; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; + +import javax.xml.bind.annotation.XmlAccessType; +import javax.xml.bind.annotation.XmlAccessorType; +import javax.xml.bind.annotation.XmlElement; +import javax.xml.bind.annotation.XmlRootElement; + +import org.apache.hadoop.classification.InterfaceAudience.Public; +import org.apache.hadoop.classification.InterfaceStability.Unstable; + +/** + *
<p>
+ * The class that contains the meta information of some conceptual entity + * and its related events. The entity can be an application, an application + * attempt, a container or any other user-defined object. + *
</p>
+ * + *
<p>
+ * Primary filters will be used to index the entities in + * <code>TimelineStore</code>, such that users should carefully choose the + * information they want to store as the primary filters. The remaining can be + * stored as other information. + *
</p>
+ */ +@XmlRootElement(name = "entity") +@XmlAccessorType(XmlAccessType.NONE) +@Public +@Unstable +public class TimelineEntity implements Comparable { + + private String entityType; + private String entityId; + private Long startTime; + private List events = new ArrayList(); + private Map> relatedEntities = + new HashMap>(); + private Map> primaryFilters = + new HashMap>(); + private Map otherInfo = + new HashMap(); + + public TimelineEntity() { + + } + + /** + * Get the entity type + * + * @return the entity type + */ + @XmlElement(name = "entitytype") + public String getEntityType() { + return entityType; + } + + /** + * Set the entity type + * + * @param entityType + * the entity type + */ + public void setEntityType(String entityType) { + this.entityType = entityType; + } + + /** + * Get the entity Id + * + * @return the entity Id + */ + @XmlElement(name = "entity") + public String getEntityId() { + return entityId; + } + + /** + * Set the entity Id + * + * @param entityId + * the entity Id + */ + public void setEntityId(String entityId) { + this.entityId = entityId; + } + + /** + * Get the start time of the entity + * + * @return the start time of the entity + */ + @XmlElement(name = "starttime") + public Long getStartTime() { + return startTime; + } + + /** + * Set the start time of the entity + * + * @param startTime + * the start time of the entity + */ + public void setStartTime(Long startTime) { + this.startTime = startTime; + } + + /** + * Get a list of events related to the entity + * + * @return a list of events related to the entity + */ + @XmlElement(name = "events") + public List getEvents() { + return events; + } + + /** + * Add a single event related to the entity to the existing event list + * + * @param event + * a single event related to the entity + */ + public void addEvent(TimelineEvent event) { + events.add(event); + } + + /** + * Add a list of events related to the entity to the existing event list + * + * @param events + * a list of events related to the entity + */ + public void addEvents(List events) { + this.events.addAll(events); + } + + /** + * Set the event list to the given list of events related to the entity + * + * @param events + * events a list of events related to the entity + */ + public void setEvents(List events) { + this.events = events; + } + + /** + * Get the related entities + * + * @return the related entities + */ + @XmlElement(name = "relatedentities") + public Map> getRelatedEntities() { + return relatedEntities; + } + + /** + * Add an entity to the existing related entity map + * + * @param entityType + * the entity type + * @param entityId + * the entity Id + */ + public void addRelatedEntity(String entityType, String entityId) { + Set thisRelatedEntity = relatedEntities.get(entityType); + if (thisRelatedEntity == null) { + thisRelatedEntity = new HashSet(); + relatedEntities.put(entityType, thisRelatedEntity); + } + thisRelatedEntity.add(entityId); + } + + /** + * Add a map of related entities to the existing related entity map + * + * @param relatedEntities + * a map of related entities + */ + public void addRelatedEntities(Map> relatedEntities) { + for (Entry> relatedEntity : relatedEntities.entrySet()) { + Set thisRelatedEntity = + this.relatedEntities.get(relatedEntity.getKey()); + if (thisRelatedEntity == null) { + this.relatedEntities.put( + relatedEntity.getKey(), relatedEntity.getValue()); + } else { + thisRelatedEntity.addAll(relatedEntity.getValue()); + } + } + } + + /** + * Set the related entity map to the given map of 
related entities + * + * @param relatedEntities + * a map of related entities + */ + public void setRelatedEntities( + Map> relatedEntities) { + this.relatedEntities = relatedEntities; + } + + /** + * Get the primary filters + * + * @return the primary filters + */ + @XmlElement(name = "primaryfilters") + public Map> getPrimaryFilters() { + return primaryFilters; + } + + /** + * Add a single piece of primary filter to the existing primary filter map + * + * @param key + * the primary filter key + * @param value + * the primary filter value + */ + public void addPrimaryFilter(String key, Object value) { + Set thisPrimaryFilter = primaryFilters.get(key); + if (thisPrimaryFilter == null) { + thisPrimaryFilter = new HashSet(); + primaryFilters.put(key, thisPrimaryFilter); + } + thisPrimaryFilter.add(value); + } + + /** + * Add a map of primary filters to the existing primary filter map + * + * @param primaryFilters + * a map of primary filters + */ + public void addPrimaryFilters(Map> primaryFilters) { + for (Entry> primaryFilter : primaryFilters.entrySet()) { + Set thisPrimaryFilter = + this.primaryFilters.get(primaryFilter.getKey()); + if (thisPrimaryFilter == null) { + this.primaryFilters.put( + primaryFilter.getKey(), primaryFilter.getValue()); + } else { + thisPrimaryFilter.addAll(primaryFilter.getValue()); + } + } + } + + /** + * Set the primary filter map to the given map of primary filters + * + * @param primaryFilters + * a map of primary filters + */ + public void setPrimaryFilters(Map> primaryFilters) { + this.primaryFilters = primaryFilters; + } + + /** + * Get the other information of the entity + * + * @return the other information of the entity + */ + @XmlElement(name = "otherinfo") + public Map getOtherInfo() { + return otherInfo; + } + + /** + * Add one piece of other information of the entity to the existing other info + * map + * + * @param key + * the other information key + * @param value + * the other information value + */ + public void addOtherInfo(String key, Object value) { + this.otherInfo.put(key, value); + } + + /** + * Add a map of other information of the entity to the existing other info map + * + * @param otherInfo + * a map of other information + */ + public void addOtherInfo(Map otherInfo) { + this.otherInfo.putAll(otherInfo); + } + + /** + * Set the other info map to the given map of other information + * + * @param otherInfo + * a map of other information + */ + public void setOtherInfo(Map otherInfo) { + this.otherInfo = otherInfo; + } + + @Override + public int hashCode() { + // generated by eclipse + final int prime = 31; + int result = 1; + result = prime * result + ((entityId == null) ? 0 : entityId.hashCode()); + result = + prime * result + ((entityType == null) ? 0 : entityType.hashCode()); + result = prime * result + ((events == null) ? 0 : events.hashCode()); + result = prime * result + ((otherInfo == null) ? 0 : otherInfo.hashCode()); + result = + prime * result + + ((primaryFilters == null) ? 0 : primaryFilters.hashCode()); + result = + prime * result + + ((relatedEntities == null) ? 0 : relatedEntities.hashCode()); + result = prime * result + ((startTime == null) ? 
0 : startTime.hashCode()); + return result; + } + + @Override + public boolean equals(Object obj) { + // generated by eclipse + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + TimelineEntity other = (TimelineEntity) obj; + if (entityId == null) { + if (other.entityId != null) + return false; + } else if (!entityId.equals(other.entityId)) + return false; + if (entityType == null) { + if (other.entityType != null) + return false; + } else if (!entityType.equals(other.entityType)) + return false; + if (events == null) { + if (other.events != null) + return false; + } else if (!events.equals(other.events)) + return false; + if (otherInfo == null) { + if (other.otherInfo != null) + return false; + } else if (!otherInfo.equals(other.otherInfo)) + return false; + if (primaryFilters == null) { + if (other.primaryFilters != null) + return false; + } else if (!primaryFilters.equals(other.primaryFilters)) + return false; + if (relatedEntities == null) { + if (other.relatedEntities != null) + return false; + } else if (!relatedEntities.equals(other.relatedEntities)) + return false; + if (startTime == null) { + if (other.startTime != null) + return false; + } else if (!startTime.equals(other.startTime)) + return false; + return true; + } + + @Override + public int compareTo(TimelineEntity other) { + int comparison = entityType.compareTo(other.entityType); + if (comparison == 0) { + long thisStartTime = + startTime == null ? Long.MIN_VALUE : startTime; + long otherStartTime = + other.startTime == null ? Long.MIN_VALUE : other.startTime; + if (thisStartTime > otherStartTime) { + return -1; + } else if (thisStartTime < otherStartTime) { + return 1; + } else { + return entityId.compareTo(other.entityId); + } + } else { + return comparison; + } + } + +} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timeline/TimelineEvent.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timeline/TimelineEvent.java new file mode 100644 index 0000000..aa49538 --- /dev/null +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timeline/TimelineEvent.java @@ -0,0 +1,172 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.yarn.api.records.timeline; + +import java.util.HashMap; +import java.util.Map; + +import javax.xml.bind.annotation.XmlAccessType; +import javax.xml.bind.annotation.XmlAccessorType; +import javax.xml.bind.annotation.XmlElement; +import javax.xml.bind.annotation.XmlRootElement; + +import org.apache.hadoop.classification.InterfaceAudience.Public; +import org.apache.hadoop.classification.InterfaceStability.Unstable; + +/** + * The class that contains the information of an event that is related to some + * conceptual entity of an application. Users are free to define what the event + * means, such as starting an application, getting allocated a container and + * etc. + */ +@XmlRootElement(name = "event") +@XmlAccessorType(XmlAccessType.NONE) +@Public +@Unstable +public class TimelineEvent implements Comparable { + + private long timestamp; + private String eventType; + private Map eventInfo = new HashMap(); + + public TimelineEvent() { + } + + /** + * Get the timestamp of the event + * + * @return the timestamp of the event + */ + @XmlElement(name = "timestamp") + public long getTimestamp() { + return timestamp; + } + + /** + * Set the timestamp of the event + * + * @param timestamp + * the timestamp of the event + */ + public void setTimestamp(long timestamp) { + this.timestamp = timestamp; + } + + /** + * Get the event type + * + * @return the event type + */ + @XmlElement(name = "eventtype") + public String getEventType() { + return eventType; + } + + /** + * Set the event type + * + * @param eventType + * the event type + */ + public void setEventType(String eventType) { + this.eventType = eventType; + } + + /** + * Set the information of the event + * + * @return the information of the event + */ + @XmlElement(name = "eventinfo") + public Map getEventInfo() { + return eventInfo; + } + + /** + * Add one piece of the information of the event to the existing information + * map + * + * @param key + * the information key + * @param value + * the information value + */ + public void addEventInfo(String key, Object value) { + this.eventInfo.put(key, value); + } + + /** + * Add a map of the information of the event to the existing information map + * + * @param eventInfo + * a map of of the information of the event + */ + public void addEventInfo(Map eventInfo) { + this.eventInfo.putAll(eventInfo); + } + + /** + * Set the information map to the given map of the information of the event + * + * @param eventInfo + * a map of of the information of the event + */ + public void setEventInfo(Map eventInfo) { + this.eventInfo = eventInfo; + } + + @Override + public int compareTo(TimelineEvent other) { + if (timestamp > other.timestamp) { + return -1; + } else if (timestamp < other.timestamp) { + return 1; + } else { + return eventType.compareTo(other.eventType); + } + } + + @Override + public boolean equals(Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + + TimelineEvent event = (TimelineEvent) o; + + if (timestamp != event.timestamp) + return false; + if (!eventType.equals(event.eventType)) + return false; + if (eventInfo != null ? !eventInfo.equals(event.eventInfo) : + event.eventInfo != null) + return false; + + return true; + } + + @Override + public int hashCode() { + int result = (int) (timestamp ^ (timestamp >>> 32)); + result = 31 * result + eventType.hashCode(); + result = 31 * result + (eventInfo != null ? 
eventInfo.hashCode() : 0); + return result; + } +} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timeline/TimelineEvents.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timeline/TimelineEvents.java new file mode 100644 index 0000000..da62c27 --- /dev/null +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timeline/TimelineEvents.java @@ -0,0 +1,189 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.yarn.api.records.timeline; + +import java.util.ArrayList; +import java.util.List; + +import javax.xml.bind.annotation.XmlAccessType; +import javax.xml.bind.annotation.XmlAccessorType; +import javax.xml.bind.annotation.XmlElement; +import javax.xml.bind.annotation.XmlRootElement; + +import org.apache.hadoop.classification.InterfaceAudience.Public; +import org.apache.hadoop.classification.InterfaceStability.Unstable; + +/** + * The class that hosts a list of events, which are categorized according to + * their related entities. + */ +@XmlRootElement(name = "events") +@XmlAccessorType(XmlAccessType.NONE) +@Public +@Unstable +public class TimelineEvents { + + private List allEvents = + new ArrayList(); + + public TimelineEvents() { + + } + + /** + * Get a list of {@link EventsOfOneEntity} instances + * + * @return a list of {@link EventsOfOneEntity} instances + */ + @XmlElement(name = "events") + public List getAllEvents() { + return allEvents; + } + + /** + * Add a single {@link EventsOfOneEntity} instance into the existing list + * + * @param eventsOfOneEntity + * a single {@link EventsOfOneEntity} instance + */ + public void addEvent(EventsOfOneEntity eventsOfOneEntity) { + allEvents.add(eventsOfOneEntity); + } + + /** + * Add a list of {@link EventsOfOneEntity} instances into the existing list + * + * @param allEvents + * a list of {@link EventsOfOneEntity} instances + */ + public void addEvents(List allEvents) { + this.allEvents.addAll(allEvents); + } + + /** + * Set the list to the given list of {@link EventsOfOneEntity} instances + * + * @param allEvents + * a list of {@link EventsOfOneEntity} instances + */ + public void setEvents(List allEvents) { + this.allEvents.clear(); + this.allEvents.addAll(allEvents); + } + + /** + * The class that hosts a list of events that are only related to one entity. 
+ */ + @XmlRootElement(name = "events") + @XmlAccessorType(XmlAccessType.NONE) + @Public + @Unstable + public static class EventsOfOneEntity { + + private String entityId; + private String entityType; + private List events = new ArrayList(); + + public EventsOfOneEntity() { + + } + + /** + * Get the entity Id + * + * @return the entity Id + */ + @XmlElement(name = "entity") + public String getEntityId() { + return entityId; + } + + /** + * Set the entity Id + * + * @param entityId + * the entity Id + */ + public void setEntityId(String entityId) { + this.entityId = entityId; + } + + /** + * Get the entity type + * + * @return the entity type + */ + @XmlElement(name = "entitytype") + public String getEntityType() { + return entityType; + } + + /** + * Set the entity type + * + * @param entityType + * the entity type + */ + public void setEntityType(String entityType) { + this.entityType = entityType; + } + + /** + * Get a list of events + * + * @return a list of events + */ + @XmlElement(name = "events") + public List getEvents() { + return events; + } + + /** + * Add a single event to the existing event list + * + * @param event + * a single event + */ + public void addEvent(TimelineEvent event) { + events.add(event); + } + + /** + * Add a list of event to the existing event list + * + * @param events + * a list of events + */ + public void addEvents(List events) { + this.events.addAll(events); + } + + /** + * Set the event list to the given list of events + * + * @param events + * a list of events + */ + public void setEvents(List events) { + this.events = events; + } + + } + +} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timeline/TimelinePutResponse.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timeline/TimelinePutResponse.java new file mode 100644 index 0000000..37c0046 --- /dev/null +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timeline/TimelinePutResponse.java @@ -0,0 +1,176 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.yarn.api.records.timeline; + +import org.apache.hadoop.classification.InterfaceAudience.Public; +import org.apache.hadoop.classification.InterfaceStability.Unstable; + +import javax.xml.bind.annotation.XmlAccessType; +import javax.xml.bind.annotation.XmlAccessorType; +import javax.xml.bind.annotation.XmlElement; +import javax.xml.bind.annotation.XmlRootElement; +import java.util.ArrayList; +import java.util.List; + +/** + * A class that holds a list of put errors. This is the response returned when a + * list of {@link TimelineEntity} objects is added to the timeline. 
If there are errors + * in storing individual entity objects, they will be indicated in the list of + * errors. + */ +@XmlRootElement(name = "response") +@XmlAccessorType(XmlAccessType.NONE) +@Public +@Unstable +public class TimelinePutResponse { + + private List errors = new ArrayList(); + + public TimelinePutResponse() { + + } + + /** + * Get a list of {@link TimelinePutError} instances + * + * @return a list of {@link TimelinePutError} instances + */ + @XmlElement(name = "errors") + public List getErrors() { + return errors; + } + + /** + * Add a single {@link TimelinePutError} instance into the existing list + * + * @param error + * a single {@link TimelinePutError} instance + */ + public void addError(TimelinePutError error) { + errors.add(error); + } + + /** + * Add a list of {@link TimelinePutError} instances into the existing list + * + * @param errors + * a list of {@link TimelinePutError} instances + */ + public void addErrors(List errors) { + this.errors.addAll(errors); + } + + /** + * Set the list to the given list of {@link TimelinePutError} instances + * + * @param errors + * a list of {@link TimelinePutError} instances + */ + public void setErrors(List errors) { + this.errors.clear(); + this.errors.addAll(errors); + } + + /** + * A class that holds the error code for one entity. + */ + @XmlRootElement(name = "error") + @XmlAccessorType(XmlAccessType.NONE) + @Public + @Unstable + public static class TimelinePutError { + + /** + * Error code returned when no start time can be found when putting an + * entity. This occurs when the entity does not already exist in the store + * and it is put with no start time or events specified. + */ + public static final int NO_START_TIME = 1; + /** + * Error code returned if an IOException is encountered when putting an + * entity. + */ + public static final int IO_EXCEPTION = 2; + + private String entityId; + private String entityType; + private int errorCode; + + /** + * Get the entity Id + * + * @return the entity Id + */ + @XmlElement(name = "entity") + public String getEntityId() { + return entityId; + } + + /** + * Set the entity Id + * + * @param entityId + * the entity Id + */ + public void setEntityId(String entityId) { + this.entityId = entityId; + } + + /** + * Get the entity type + * + * @return the entity type + */ + @XmlElement(name = "entitytype") + public String getEntityType() { + return entityType; + } + + /** + * Set the entity type + * + * @param entityType + * the entity type + */ + public void setEntityType(String entityType) { + this.entityType = entityType; + } + + /** + * Get the error code + * + * @return an error code + */ + @XmlElement(name = "errorcode") + public int getErrorCode() { + return errorCode; + } + + /** + * Set the error code to the given error code + * + * @param errorCode + * an error code + */ + public void setErrorCode(int errorCode) { + this.errorCode = errorCode; + } + + } + +} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timeline/package-info.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timeline/package-info.java new file mode 100644 index 0000000..e91ddd4 --- /dev/null +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timeline/package-info.java @@ -0,0 +1,21 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +@InterfaceAudience.Public +package org.apache.hadoop.yarn.api.records.timeline; +import org.apache.hadoop.classification.InterfaceAudience; + diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java index e30434f..0e89ce8 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java @@ -1051,17 +1051,19 @@ AHS_PREFIX + "webapp.spnego-keytab-file"; //////////////////////////////// - // ATS Configs + // Timeline Service Configs //////////////////////////////// - public static final String ATS_PREFIX = YARN_PREFIX + "ats."; + public static final String TIMELINE_SERVICE_PREFIX = + YARN_PREFIX + "timeline-service."; - /** ATS store class */ - public static final String ATS_STORE = ATS_PREFIX + "store.class"; + /** Timeline service store class */ + public static final String TIMELINE_SERVICE_STORE = + TIMELINE_SERVICE_PREFIX + "store-class"; - /** ATS leveldb path */ - public static final String ATS_LEVELDB_PATH_PROPERTY = - ATS_PREFIX + "leveldb-apptimeline-store.path"; + /** Timeline service leveldb path */ + public static final String TIMELINE_SERVICE_LEVELDB_PATH = + TIMELINE_SERVICE_PREFIX + "leveldb-timeline-store.path"; //////////////////////////////// // Other Configs diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/TimelineClient.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/TimelineClient.java index 8be00ac..fda0a4f 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/TimelineClient.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/TimelineClient.java @@ -24,8 +24,8 @@ import org.apache.hadoop.classification.InterfaceAudience.Public; import org.apache.hadoop.classification.InterfaceStability.Unstable; import org.apache.hadoop.service.AbstractService; -import org.apache.hadoop.yarn.api.records.apptimeline.ATSEntity; -import org.apache.hadoop.yarn.api.records.apptimeline.ATSPutErrors; +import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity; +import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse; import org.apache.hadoop.yarn.client.api.impl.TimelineClientImpl; import org.apache.hadoop.yarn.exceptions.YarnException; @@ -33,7 +33,7 @@ * A client library that can be used to post some information in terms of a * number of conceptual entities. 
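To make the renamed client API concrete, here is a minimal usage sketch (illustration only, not part of the patch). It assumes the classes as renamed in this diff, instantiates TimelineClientImpl directly since the diff shows no public factory method, and assumes a timeline server reachable at the default address:

import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEvent;
import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse;
import org.apache.hadoop.yarn.client.api.impl.TimelineClientImpl;
import org.apache.hadoop.yarn.conf.YarnConfiguration;

public class TimelinePutExample {
  public static void main(String[] args) throws Exception {
    // Sketch: the client is a YARN service, so it must be init'ed and started.
    TimelineClientImpl client = new TimelineClientImpl();
    client.init(new YarnConfiguration());
    client.start();
    try {
      TimelineEntity entity = new TimelineEntity();
      entity.setEntityId("application_1234_0001"); // hypothetical id/type
      entity.setEntityType("MY_APP");
      entity.setStartTime(System.currentTimeMillis());
      TimelineEvent event = new TimelineEvent();
      event.setTimestamp(System.currentTimeMillis());
      event.setEventType("APP_STARTED");
      entity.addEvent(event);
      // Blocking call; per-entity failures come back in the response
      // rather than as exceptions.
      TimelinePutResponse response = client.putEntities(entity);
      System.out.println("put errors: " + response.getErrors().size());
    } finally {
      client.stop();
    }
  }
}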
* - * @See ATSEntity + * @see TimelineEntity */ @Public @Unstable @@ -52,19 +52,19 @@ protected TimelineClient(String name) { /** *
<p>
- * Post the information of a number of conceptual entities of an application - * to the timeline server. It is a blocking API. The method will not return - * until it gets the response from the timeline server. + * Send the information of a number of conceptual entities to the timeline + * server. It is a blocking API. The method will not return until it gets the + * response from the timeline server. *
</p>
* * @param entities - * the collection of {@link ATSEntity} - * @return the error information if the post entities are not correctly stored + * the collection of {@link TimelineEntity} + * @return the error information if the sent entities are not correctly stored * @throws IOException * @throws YarnException */ @Public - public abstract ATSPutErrors postEntities( - ATSEntity... entities) throws IOException, YarnException; + public abstract TimelinePutResponse putEntities( + TimelineEntity... entities) throws IOException, YarnException; } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl.java index 3269b8b..cb5bdcc 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl.java @@ -29,9 +29,9 @@ import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.classification.InterfaceStability.Unstable; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.yarn.api.records.apptimeline.ATSEntities; -import org.apache.hadoop.yarn.api.records.apptimeline.ATSEntity; -import org.apache.hadoop.yarn.api.records.apptimeline.ATSPutErrors; +import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities; +import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity; +import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse; import org.apache.hadoop.yarn.client.api.TimelineClient; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.exceptions.YarnException; @@ -50,7 +50,7 @@ public class TimelineClientImpl extends TimelineClient { private static final Log LOG = LogFactory.getLog(TimelineClientImpl.class); - private static final String RESOURCE_URI_STR = "/ws/v1/apptimeline/"; + private static final String RESOURCE_URI_STR = "/ws/v1/timeline/"; private static final Joiner JOINER = Joiner.on(""); private Client client; @@ -79,9 +79,9 @@ protected void serviceInit(Configuration conf) throws Exception { } @Override - public ATSPutErrors postEntities( - ATSEntity... entities) throws IOException, YarnException { - ATSEntities entitiesContainer = new ATSEntities(); + public TimelinePutResponse putEntities( + TimelineEntity... 
entities) throws IOException, YarnException { + TimelineEntities entitiesContainer = new TimelineEntities(); entitiesContainer.addEntities(Arrays.asList(entities)); ClientResponse resp = doPostingEntities(entitiesContainer); if (resp.getClientResponseStatus() != ClientResponse.Status.OK) { @@ -95,12 +95,12 @@ public ATSPutErrors postEntities( } throw new YarnException(msg); } - return resp.getEntity(ATSPutErrors.class); + return resp.getEntity(TimelinePutResponse.class); } @Private @VisibleForTesting - public ClientResponse doPostingEntities(ATSEntities entities) { + public ClientResponse doPostingEntities(TimelineEntities entities) { WebResource webResource = client.resource(resURI); return webResource.accept(MediaType.APPLICATION_JSON) .type(MediaType.APPLICATION_JSON) diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestTimelineClient.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestTimelineClient.java index a3917a2..3804757 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestTimelineClient.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestTimelineClient.java @@ -25,10 +25,10 @@ import static org.mockito.Mockito.when; import junit.framework.Assert; -import org.apache.hadoop.yarn.api.records.apptimeline.ATSEntities; -import org.apache.hadoop.yarn.api.records.apptimeline.ATSEntity; -import org.apache.hadoop.yarn.api.records.apptimeline.ATSEvent; -import org.apache.hadoop.yarn.api.records.apptimeline.ATSPutErrors; +import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities; +import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity; +import org.apache.hadoop.yarn.api.records.timeline.TimelineEvent; +import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse; import org.apache.hadoop.yarn.client.api.TimelineClient; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.exceptions.YarnException; @@ -58,8 +58,8 @@ public void tearDown() { public void testPostEntities() throws Exception { mockClientResponse(ClientResponse.Status.OK, false); try { - ATSPutErrors errors = client.postEntities(generateATSEntity()); - Assert.assertEquals(0, errors.getErrors().size()); + TimelinePutResponse response = client.putEntities(generateEntity()); + Assert.assertEquals(0, response.getErrors().size()); } catch (YarnException e) { Assert.fail("Exception is not expected"); } @@ -69,14 +69,14 @@ public void testPostEntities() throws Exception { public void testPostEntitiesWithError() throws Exception { mockClientResponse(ClientResponse.Status.OK, true); try { - ATSPutErrors errors = client.postEntities(generateATSEntity()); - Assert.assertEquals(1, errors.getErrors().size()); - Assert.assertEquals("test entity id", errors.getErrors().get(0) + TimelinePutResponse response = client.putEntities(generateEntity()); + Assert.assertEquals(1, response.getErrors().size()); + Assert.assertEquals("test entity id", response.getErrors().get(0) .getEntityId()); - Assert.assertEquals("test entity type", errors.getErrors().get(0) + Assert.assertEquals("test entity type", response.getErrors().get(0) .getEntityType()); - Assert.assertEquals(ATSPutErrors.ATSPutError.IO_EXCEPTION, - errors.getErrors().get(0).getErrorCode()); + Assert.assertEquals(TimelinePutResponse.TimelinePutError.IO_EXCEPTION, + 
response.getErrors().get(0).getErrorCode()); } catch (YarnException e) { Assert.fail("Exception is not expected"); } @@ -86,7 +86,7 @@ public void testPostEntitiesWithError() throws Exception { public void testPostEntitiesNoResponse() throws Exception { mockClientResponse(ClientResponse.Status.INTERNAL_SERVER_ERROR, false); try { - client.postEntities(generateATSEntity()); + client.putEntities(generateEntity()); Assert.fail("Exception is expected"); } catch (YarnException e) { Assert.assertTrue(e.getMessage().contains( @@ -98,27 +98,28 @@ private ClientResponse mockClientResponse(ClientResponse.Status status, boolean hasError) { ClientResponse response = mock(ClientResponse.class); doReturn(response).when(client) - .doPostingEntities(any(ATSEntities.class)); + .doPostingEntities(any(TimelineEntities.class)); when(response.getClientResponseStatus()).thenReturn(status); - ATSPutErrors.ATSPutError error = new ATSPutErrors.ATSPutError(); + TimelinePutResponse.TimelinePutError error = + new TimelinePutResponse.TimelinePutError(); error.setEntityId("test entity id"); error.setEntityType("test entity type"); - error.setErrorCode(ATSPutErrors.ATSPutError.IO_EXCEPTION); - ATSPutErrors errors = new ATSPutErrors(); + error.setErrorCode(TimelinePutResponse.TimelinePutError.IO_EXCEPTION); + TimelinePutResponse putResponse = new TimelinePutResponse(); if (hasError) { - errors.addError(error); + putResponse.addError(error); } - when(response.getEntity(ATSPutErrors.class)).thenReturn(errors); + when(response.getEntity(TimelinePutResponse.class)).thenReturn(putResponse); return response; } - private static ATSEntity generateATSEntity() { - ATSEntity entity = new ATSEntity(); + private static TimelineEntity generateEntity() { + TimelineEntity entity = new TimelineEntity(); entity.setEntityId("entity id"); entity.setEntityType("entity type"); entity.setStartTime(System.currentTimeMillis()); for (int i = 0; i < 2; ++i) { - ATSEvent event = new ATSEvent(); + TimelineEvent event = new TimelineEvent(); event.setTimestamp(System.currentTimeMillis()); event.setEventType("test event type " + i); event.addEventInfo("key1", "val1"); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/TimelineUtils.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/TimelineUtils.java deleted file mode 100644 index 4ab557e..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/TimelineUtils.java +++ /dev/null @@ -1,86 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.hadoop.yarn.util; - -import java.io.IOException; - -import org.apache.hadoop.classification.InterfaceAudience.Public; -import org.apache.hadoop.classification.InterfaceStability.Evolving; -import org.codehaus.jackson.JsonGenerationException; -import org.codehaus.jackson.map.AnnotationIntrospector; -import org.codehaus.jackson.map.JsonMappingException; -import org.codehaus.jackson.map.ObjectMapper; -import org.codehaus.jackson.map.annotate.JsonSerialize.Inclusion; -import org.codehaus.jackson.xc.JaxbAnnotationIntrospector; - -/** - * The helper class for the timeline module. - * - */ -@Public -@Evolving -public class TimelineUtils { - - private static ObjectMapper mapper; - - static { - mapper = new ObjectMapper(); - AnnotationIntrospector introspector = new JaxbAnnotationIntrospector(); - mapper.setAnnotationIntrospector(introspector); - mapper.getSerializationConfig() - .setSerializationInclusion(Inclusion.NON_NULL); - } - - /** - * Serialize a POJO object into a JSON string not in a pretty format - * - * @param o - * an object to serialize - * @return a JSON string - * @throws IOException - * @throws JsonMappingException - * @throws JsonGenerationException - */ - public static String dumpTimelineRecordtoJSON(Object o) - throws JsonGenerationException, JsonMappingException, IOException { - return dumpTimelineRecordtoJSON(o, false); - } - - /** - * Serialize a POJO object into a JSON string - * - * @param o - * an object to serialize - * @param pretty - * whether in a pretty format or not - * @return a JSON string - * @throws IOException - * @throws JsonMappingException - * @throws JsonGenerationException - */ - public static String dumpTimelineRecordtoJSON(Object o, boolean pretty) - throws JsonGenerationException, JsonMappingException, IOException { - if (pretty) { - return mapper.defaultPrettyPrintingWriter().writeValueAsString(o); - } else { - return mapper.writeValueAsString(o); - } - } - -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/timeline/TimelineUtils.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/timeline/TimelineUtils.java new file mode 100644 index 0000000..35d8560 --- /dev/null +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/timeline/TimelineUtils.java @@ -0,0 +1,86 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.yarn.util.timeline; + +import java.io.IOException; + +import org.apache.hadoop.classification.InterfaceAudience.Public; +import org.apache.hadoop.classification.InterfaceStability.Evolving; +import org.codehaus.jackson.JsonGenerationException; +import org.codehaus.jackson.map.AnnotationIntrospector; +import org.codehaus.jackson.map.JsonMappingException; +import org.codehaus.jackson.map.ObjectMapper; +import org.codehaus.jackson.map.annotate.JsonSerialize.Inclusion; +import org.codehaus.jackson.xc.JaxbAnnotationIntrospector; + +/** + * The helper class for the timeline module. + * + */ +@Public +@Evolving +public class TimelineUtils { + + private static ObjectMapper mapper; + + static { + mapper = new ObjectMapper(); + AnnotationIntrospector introspector = new JaxbAnnotationIntrospector(); + mapper.setAnnotationIntrospector(introspector); + mapper.getSerializationConfig() + .setSerializationInclusion(Inclusion.NON_NULL); + } + + /** + * Serialize a POJO object into a JSON string not in a pretty format + * + * @param o + * an object to serialize + * @return a JSON string + * @throws IOException + * @throws JsonMappingException + * @throws JsonGenerationException + */ + public static String dumpTimelineRecordtoJSON(Object o) + throws JsonGenerationException, JsonMappingException, IOException { + return dumpTimelineRecordtoJSON(o, false); + } + + /** + * Serialize a POJO object into a JSON string + * + * @param o + * an object to serialize + * @param pretty + * whether in a pretty format or not + * @return a JSON string + * @throws IOException + * @throws JsonMappingException + * @throws JsonGenerationException + */ + public static String dumpTimelineRecordtoJSON(Object o, boolean pretty) + throws JsonGenerationException, JsonMappingException, IOException { + if (pretty) { + return mapper.defaultPrettyPrintingWriter().writeValueAsString(o); + } else { + return mapper.writeValueAsString(o); + } + } + +} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/timeline/package-info.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/timeline/package-info.java new file mode 100644 index 0000000..5c18a55 --- /dev/null +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/timeline/package-info.java @@ -0,0 +1,21 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
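A short sketch of the relocated TimelineUtils helper in use (illustration only, not part of the patch); because the mapper above is configured with NON_NULL inclusion, unset fields are omitted from the output:

import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
import org.apache.hadoop.yarn.util.timeline.TimelineUtils;

public class TimelineJsonDemo {
  public static void main(String[] args) throws Exception {
    TimelineEntity entity = new TimelineEntity();
    entity.setEntityId("entity id");
    entity.setEntityType("entity type");
    // Compact form; unset fields (e.g. start time) are dropped.
    System.out.println(TimelineUtils.dumpTimelineRecordtoJSON(entity));
    // Pretty-printed form.
    System.out.println(TimelineUtils.dumpTimelineRecordtoJSON(entity, true));
  }
}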
+ */ +@InterfaceAudience.Public +package org.apache.hadoop.yarn.util.timeline; +import org.apache.hadoop.classification.InterfaceAudience; + diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml index cc8b124..7e21008 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml @@ -1140,18 +1140,18 @@ org.apache.hadoop.yarn.server.applicationhistoryservice.FileSystemApplicationHistoryStore - + - <description>Store class name for application timeline store</description> - <name>yarn.ats.store.class</name> - <value>org.apache.hadoop.yarn.server.applicationhistoryservice.apptimeline.LeveldbApplicationTimelineStore</value> + <description>Store class name for timeline store</description> + <name>yarn.timeline-service.store-class</name> + <value>org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.LeveldbTimelineStore</value> - <description>Store file name for leveldb timeline store</description> - <name>yarn.ats.leveldb-apptimeline-store.path</name> - <value>${yarn.log.dir}/ats</value> + <description>Store file name for leveldb timeline store</description> + <name>yarn.timeline-service.leveldb-timeline-store.path</name> + <value>${yarn.log.dir}/timeline</value> diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/records/apptimeline/TestApplicationTimelineRecords.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/records/apptimeline/TestApplicationTimelineRecords.java deleted file mode 100644 index 330e099..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/records/apptimeline/TestApplicationTimelineRecords.java +++ /dev/null @@ -1,159 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License.
- */ - -package org.apache.hadoop.yarn.api.records.apptimeline; - -import java.util.ArrayList; -import java.util.List; - -import junit.framework.Assert; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.yarn.api.records.apptimeline.ATSPutErrors.ATSPutError; -import org.apache.hadoop.yarn.util.TimelineUtils; -import org.junit.Test; - -public class TestApplicationTimelineRecords { - - private static final Log LOG = - LogFactory.getLog(TestApplicationTimelineRecords.class); - - @Test - public void testATSEntities() throws Exception { - ATSEntities entities = new ATSEntities(); - for (int j = 0; j < 2; ++j) { - ATSEntity entity = new ATSEntity(); - entity.setEntityId("entity id " + j); - entity.setEntityType("entity type " + j); - entity.setStartTime(System.currentTimeMillis()); - for (int i = 0; i < 2; ++i) { - ATSEvent event = new ATSEvent(); - event.setTimestamp(System.currentTimeMillis()); - event.setEventType("event type " + i); - event.addEventInfo("key1", "val1"); - event.addEventInfo("key2", "val2"); - entity.addEvent(event); - } - entity.addRelatedEntity("test ref type 1", "test ref id 1"); - entity.addRelatedEntity("test ref type 2", "test ref id 2"); - entity.addPrimaryFilter("pkey1", "pval1"); - entity.addPrimaryFilter("pkey2", "pval2"); - entity.addOtherInfo("okey1", "oval1"); - entity.addOtherInfo("okey2", "oval2"); - entities.addEntity(entity); - } - LOG.info("Entities in JSON:"); - LOG.info(TimelineUtils.dumpTimelineRecordtoJSON(entities, true)); - - Assert.assertEquals(2, entities.getEntities().size()); - ATSEntity entity1 = entities.getEntities().get(0); - Assert.assertEquals("entity id 0", entity1.getEntityId()); - Assert.assertEquals("entity type 0", entity1.getEntityType()); - Assert.assertEquals(2, entity1.getRelatedEntities().size()); - Assert.assertEquals(2, entity1.getEvents().size()); - Assert.assertEquals(2, entity1.getPrimaryFilters().size()); - Assert.assertEquals(2, entity1.getOtherInfo().size()); - ATSEntity entity2 = entities.getEntities().get(1); - Assert.assertEquals("entity id 1", entity2.getEntityId()); - Assert.assertEquals("entity type 1", entity2.getEntityType()); - Assert.assertEquals(2, entity2.getRelatedEntities().size()); - Assert.assertEquals(2, entity2.getEvents().size()); - Assert.assertEquals(2, entity2.getPrimaryFilters().size()); - Assert.assertEquals(2, entity2.getOtherInfo().size()); - } - - @Test - public void testATSEvents() throws Exception { - ATSEvents events = new ATSEvents(); - for (int j = 0; j < 2; ++j) { - ATSEvents.ATSEventsOfOneEntity partEvents = - new ATSEvents.ATSEventsOfOneEntity(); - partEvents.setEntityId("entity id " + j); - partEvents.setEntityType("entity type " + j); - for (int i = 0; i < 2; ++i) { - ATSEvent event = new ATSEvent(); - event.setTimestamp(System.currentTimeMillis()); - event.setEventType("event type " + i); - event.addEventInfo("key1", "val1"); - event.addEventInfo("key2", "val2"); - partEvents.addEvent(event); - } - events.addEvent(partEvents); - } - LOG.info("Events in JSON:"); - LOG.info(TimelineUtils.dumpTimelineRecordtoJSON(events, true)); - - Assert.assertEquals(2, events.getAllEvents().size()); - ATSEvents.ATSEventsOfOneEntity partEvents1 = events.getAllEvents().get(0); - Assert.assertEquals("entity id 0", partEvents1.getEntityId()); - Assert.assertEquals("entity type 0", partEvents1.getEntityType()); - Assert.assertEquals(2, partEvents1.getEvents().size()); - ATSEvent event11 = partEvents1.getEvents().get(0); - 
Assert.assertEquals("event type 0", event11.getEventType()); - Assert.assertEquals(2, event11.getEventInfo().size()); - ATSEvent event12 = partEvents1.getEvents().get(1); - Assert.assertEquals("event type 1", event12.getEventType()); - Assert.assertEquals(2, event12.getEventInfo().size()); - ATSEvents.ATSEventsOfOneEntity partEvents2 = events.getAllEvents().get(1); - Assert.assertEquals("entity id 1", partEvents2.getEntityId()); - Assert.assertEquals("entity type 1", partEvents2.getEntityType()); - Assert.assertEquals(2, partEvents2.getEvents().size()); - ATSEvent event21 = partEvents2.getEvents().get(0); - Assert.assertEquals("event type 0", event21.getEventType()); - Assert.assertEquals(2, event21.getEventInfo().size()); - ATSEvent event22 = partEvents2.getEvents().get(1); - Assert.assertEquals("event type 1", event22.getEventType()); - Assert.assertEquals(2, event22.getEventInfo().size()); - } - - @Test - public void testATSPutErrors() throws Exception { - ATSPutErrors atsPutErrors = new ATSPutErrors(); - ATSPutError error1 = new ATSPutError(); - error1.setEntityId("entity id 1"); - error1.setEntityId("entity type 1"); - error1.setErrorCode(ATSPutError.NO_START_TIME); - atsPutErrors.addError(error1); - List errors = new ArrayList(); - errors.add(error1); - ATSPutError error2 = new ATSPutError(); - error2.setEntityId("entity id 2"); - error2.setEntityId("entity type 2"); - error2.setErrorCode(ATSPutError.IO_EXCEPTION); - errors.add(error2); - atsPutErrors.addErrors(errors); - LOG.info("Errors in JSON:"); - LOG.info(TimelineUtils.dumpTimelineRecordtoJSON(atsPutErrors, true)); - - Assert.assertEquals(3, atsPutErrors.getErrors().size()); - ATSPutError e = atsPutErrors.getErrors().get(0); - Assert.assertEquals(error1.getEntityId(), e.getEntityId()); - Assert.assertEquals(error1.getEntityType(), e.getEntityType()); - Assert.assertEquals(error1.getErrorCode(), e.getErrorCode()); - e = atsPutErrors.getErrors().get(1); - Assert.assertEquals(error1.getEntityId(), e.getEntityId()); - Assert.assertEquals(error1.getEntityType(), e.getEntityType()); - Assert.assertEquals(error1.getErrorCode(), e.getErrorCode()); - e = atsPutErrors.getErrors().get(2); - Assert.assertEquals(error2.getEntityId(), e.getEntityId()); - Assert.assertEquals(error2.getEntityType(), e.getEntityType()); - Assert.assertEquals(error2.getErrorCode(), e.getErrorCode()); - } - -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/records/timeline/TestTimelineRecords.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/records/timeline/TestTimelineRecords.java new file mode 100644 index 0000000..5de8e71 --- /dev/null +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/records/timeline/TestTimelineRecords.java @@ -0,0 +1,164 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.yarn.api.records.timeline; + +import java.util.ArrayList; +import java.util.List; + +import junit.framework.Assert; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities; +import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity; +import org.apache.hadoop.yarn.api.records.timeline.TimelineEvent; +import org.apache.hadoop.yarn.api.records.timeline.TimelineEvents; +import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse; +import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse.TimelinePutError; +import org.apache.hadoop.yarn.util.timeline.TimelineUtils; +import org.junit.Test; + +public class TestTimelineRecords { + + private static final Log LOG = + LogFactory.getLog(TestTimelineRecords.class); + + @Test + public void testEntities() throws Exception { + TimelineEntities entities = new TimelineEntities(); + for (int j = 0; j < 2; ++j) { + TimelineEntity entity = new TimelineEntity(); + entity.setEntityId("entity id " + j); + entity.setEntityType("entity type " + j); + entity.setStartTime(System.currentTimeMillis()); + for (int i = 0; i < 2; ++i) { + TimelineEvent event = new TimelineEvent(); + event.setTimestamp(System.currentTimeMillis()); + event.setEventType("event type " + i); + event.addEventInfo("key1", "val1"); + event.addEventInfo("key2", "val2"); + entity.addEvent(event); + } + entity.addRelatedEntity("test ref type 1", "test ref id 1"); + entity.addRelatedEntity("test ref type 2", "test ref id 2"); + entity.addPrimaryFilter("pkey1", "pval1"); + entity.addPrimaryFilter("pkey2", "pval2"); + entity.addOtherInfo("okey1", "oval1"); + entity.addOtherInfo("okey2", "oval2"); + entities.addEntity(entity); + } + LOG.info("Entities in JSON:"); + LOG.info(TimelineUtils.dumpTimelineRecordtoJSON(entities, true)); + + Assert.assertEquals(2, entities.getEntities().size()); + TimelineEntity entity1 = entities.getEntities().get(0); + Assert.assertEquals("entity id 0", entity1.getEntityId()); + Assert.assertEquals("entity type 0", entity1.getEntityType()); + Assert.assertEquals(2, entity1.getRelatedEntities().size()); + Assert.assertEquals(2, entity1.getEvents().size()); + Assert.assertEquals(2, entity1.getPrimaryFilters().size()); + Assert.assertEquals(2, entity1.getOtherInfo().size()); + TimelineEntity entity2 = entities.getEntities().get(1); + Assert.assertEquals("entity id 1", entity2.getEntityId()); + Assert.assertEquals("entity type 1", entity2.getEntityType()); + Assert.assertEquals(2, entity2.getRelatedEntities().size()); + Assert.assertEquals(2, entity2.getEvents().size()); + Assert.assertEquals(2, entity2.getPrimaryFilters().size()); + Assert.assertEquals(2, entity2.getOtherInfo().size()); + } + + @Test + public void testEvents() throws Exception { + TimelineEvents events = new TimelineEvents(); + for (int j = 0; j < 2; ++j) { + TimelineEvents.EventsOfOneEntity partEvents = + new TimelineEvents.EventsOfOneEntity(); + partEvents.setEntityId("entity id " + j); + partEvents.setEntityType("entity type " + 
j); + for (int i = 0; i < 2; ++i) { + TimelineEvent event = new TimelineEvent(); + event.setTimestamp(System.currentTimeMillis()); + event.setEventType("event type " + i); + event.addEventInfo("key1", "val1"); + event.addEventInfo("key2", "val2"); + partEvents.addEvent(event); + } + events.addEvent(partEvents); + } + LOG.info("Events in JSON:"); + LOG.info(TimelineUtils.dumpTimelineRecordtoJSON(events, true)); + + Assert.assertEquals(2, events.getAllEvents().size()); + TimelineEvents.EventsOfOneEntity partEvents1 = events.getAllEvents().get(0); + Assert.assertEquals("entity id 0", partEvents1.getEntityId()); + Assert.assertEquals("entity type 0", partEvents1.getEntityType()); + Assert.assertEquals(2, partEvents1.getEvents().size()); + TimelineEvent event11 = partEvents1.getEvents().get(0); + Assert.assertEquals("event type 0", event11.getEventType()); + Assert.assertEquals(2, event11.getEventInfo().size()); + TimelineEvent event12 = partEvents1.getEvents().get(1); + Assert.assertEquals("event type 1", event12.getEventType()); + Assert.assertEquals(2, event12.getEventInfo().size()); + TimelineEvents.EventsOfOneEntity partEvents2 = events.getAllEvents().get(1); + Assert.assertEquals("entity id 1", partEvents2.getEntityId()); + Assert.assertEquals("entity type 1", partEvents2.getEntityType()); + Assert.assertEquals(2, partEvents2.getEvents().size()); + TimelineEvent event21 = partEvents2.getEvents().get(0); + Assert.assertEquals("event type 0", event21.getEventType()); + Assert.assertEquals(2, event21.getEventInfo().size()); + TimelineEvent event22 = partEvents2.getEvents().get(1); + Assert.assertEquals("event type 1", event22.getEventType()); + Assert.assertEquals(2, event22.getEventInfo().size()); + } + + @Test + public void testTimelinePutErrors() throws Exception { + TimelinePutResponse putErrors = new TimelinePutResponse(); + TimelinePutError error1 = new TimelinePutError(); + error1.setEntityId("entity id 1"); + error1.setEntityType("entity type 1"); + error1.setErrorCode(TimelinePutError.NO_START_TIME); + putErrors.addError(error1); + List<TimelinePutError> errors = new ArrayList<TimelinePutError>(); + errors.add(error1); + TimelinePutError error2 = new TimelinePutError(); + error2.setEntityId("entity id 2"); + error2.setEntityType("entity type 2"); + error2.setErrorCode(TimelinePutError.IO_EXCEPTION); + errors.add(error2); + putErrors.addErrors(errors); + LOG.info("Errors in JSON:"); + LOG.info(TimelineUtils.dumpTimelineRecordtoJSON(putErrors, true)); + + Assert.assertEquals(3, putErrors.getErrors().size()); + TimelinePutError e = putErrors.getErrors().get(0); + Assert.assertEquals(error1.getEntityId(), e.getEntityId()); + Assert.assertEquals(error1.getEntityType(), e.getEntityType()); + Assert.assertEquals(error1.getErrorCode(), e.getErrorCode()); + e = putErrors.getErrors().get(1); + Assert.assertEquals(error1.getEntityId(), e.getEntityId()); + Assert.assertEquals(error1.getEntityType(), e.getEntityType()); + Assert.assertEquals(error1.getErrorCode(), e.getErrorCode()); + e = putErrors.getErrors().get(2); + Assert.assertEquals(error2.getEntityId(), e.getEntityId()); + Assert.assertEquals(error2.getEntityType(), e.getEntityType()); + Assert.assertEquals(error2.getErrorCode(), e.getErrorCode()); + } + +} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java index 73a0941..4d7cdf8 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java @@ -33,8 +33,8 @@ import org.apache.hadoop.yarn.YarnUncaughtExceptionHandler; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.exceptions.YarnRuntimeException; -import org.apache.hadoop.yarn.server.applicationhistoryservice.apptimeline.ApplicationTimelineStore; -import org.apache.hadoop.yarn.server.applicationhistoryservice.apptimeline.LeveldbApplicationTimelineStore; +import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TimelineStore; +import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.LeveldbTimelineStore; import org.apache.hadoop.yarn.server.applicationhistoryservice.webapp.AHSWebApp; import org.apache.hadoop.yarn.webapp.WebApp; import org.apache.hadoop.yarn.webapp.WebApps; @@ -54,7 +54,7 @@ ApplicationHistoryClientService ahsClientService; ApplicationHistoryManager historyManager; - ApplicationTimelineStore timelineStore; + TimelineStore timelineStore; private WebApp webApp; public ApplicationHistoryServer() { @@ -67,7 +67,7 @@ protected void serviceInit(Configuration conf) throws Exception { ahsClientService = createApplicationHistoryClientService(historyManager); addService(ahsClientService); addService((Service) historyManager); - timelineStore = createApplicationTimelineStore(conf); + timelineStore = createTimelineStore(conf); addIfService(timelineStore); super.serviceInit(conf); } @@ -141,11 +141,11 @@ protected ApplicationHistoryManager createApplicationHistoryManager( return new ApplicationHistoryManagerImpl(); } - protected ApplicationTimelineStore createApplicationTimelineStore( + protected TimelineStore createTimelineStore( Configuration conf) { return ReflectionUtils.newInstance(conf.getClass( - YarnConfiguration.ATS_STORE, LeveldbApplicationTimelineStore.class, - ApplicationTimelineStore.class), conf); + YarnConfiguration.TIMELINE_SERVICE_STORE, LeveldbTimelineStore.class, + TimelineStore.class), conf); } protected void startWebApp() { diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/ApplicationTimelineReader.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/ApplicationTimelineReader.java deleted file mode 100644 index e448ba8..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/ApplicationTimelineReader.java +++ /dev/null @@ -1,131 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
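The store selection above is plain Hadoop configuration plus ReflectionUtils; here is a sketch of the equivalent programmatic wiring (illustration only; it assumes the renamed TimelineStore and LeveldbTimelineStore classes exist as this patch creates them):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.LeveldbTimelineStore;
import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TimelineStore;

public class TimelineStoreFactoryDemo {
  public static void main(String[] args) {
    Configuration conf = new YarnConfiguration();
    // Equivalent to setting yarn.timeline-service.store-class in yarn-site.xml.
    conf.setClass(YarnConfiguration.TIMELINE_SERVICE_STORE,
        LeveldbTimelineStore.class, TimelineStore.class);
    // Hypothetical path for illustration; defaults to ${yarn.log.dir}/timeline.
    conf.set(YarnConfiguration.TIMELINE_SERVICE_LEVELDB_PATH, "/tmp/timeline");
    // Same pattern as createTimelineStore above: reflective instantiation
    // of whichever store class the configuration names.
    TimelineStore store = ReflectionUtils.newInstance(conf.getClass(
        YarnConfiguration.TIMELINE_SERVICE_STORE, LeveldbTimelineStore.class,
        TimelineStore.class), conf);
    System.out.println("store: " + store.getClass().getName());
  }
}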
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.yarn.server.applicationhistoryservice.apptimeline; - -import java.io.IOException; -import java.util.Collection; -import java.util.EnumSet; -import java.util.Set; -import java.util.SortedSet; - -import org.apache.hadoop.classification.InterfaceAudience; -import org.apache.hadoop.classification.InterfaceStability; -import org.apache.hadoop.yarn.api.records.apptimeline.ATSEntities; -import org.apache.hadoop.yarn.api.records.apptimeline.ATSEntity; -import org.apache.hadoop.yarn.api.records.apptimeline.ATSEvents; - -/** - * This interface is for retrieving application timeline information. - */ -@InterfaceAudience.Private -@InterfaceStability.Unstable -public interface ApplicationTimelineReader { - - /** - * Possible fields to retrieve for {@link #getEntities} and {@link - * #getEntity}. - */ - enum Field { - EVENTS, - RELATED_ENTITIES, - PRIMARY_FILTERS, - OTHER_INFO, - LAST_EVENT_ONLY - } - - /** - * Default limit for {@link #getEntities} and {@link #getEntityTimelines}. - */ - final long DEFAULT_LIMIT = 100; - - /** - * This method retrieves a list of entity information, {@link ATSEntity}, - * sorted by the starting timestamp for the entity, descending. - * - * @param entityType The type of entities to return (required). - * @param limit A limit on the number of entities to return. If null, - * defaults to {@link #DEFAULT_LIMIT}. - * @param windowStart The earliest start timestamp to retrieve (exclusive). - * If null, defaults to retrieving all entities until the - * limit is reached. - * @param windowEnd The latest start timestamp to retrieve (inclusive). - * If null, defaults to {@link Long#MAX_VALUE} - * @param primaryFilter Retrieves only entities that have the specified - * primary filter. If null, retrieves all entities. - * This is an indexed retrieval, and no entities that - * do not match the filter are scanned. - * @param secondaryFilters Retrieves only entities that have exact matches - * for all the specified filters in their primary - * filters or other info. This is not an indexed - * retrieval, so all entities are scanned but only - * those matching the filters are returned. - * @param fieldsToRetrieve Specifies which fields of the entity object to - * retrieve (see {@link Field}). If the set of fields - * contains {@link Field#LAST_EVENT_ONLY} and not - * {@link Field#EVENTS}, the most recent event for - * each entity is retrieved. If null, retrieves all - * fields. - * @return An {@link ATSEntities} object. - * @throws IOException - */ - ATSEntities getEntities(String entityType, - Long limit, Long windowStart, Long windowEnd, - NameValuePair primaryFilter, Collection secondaryFilters, - EnumSet fieldsToRetrieve) throws IOException; - - /** - * This method retrieves the entity information for a given entity. - * - * @param entity The entity whose information will be retrieved. - * @param entityType The type of the entity. 
- * @param fieldsToRetrieve Specifies which fields of the entity object to - * retrieve (see {@link Field}). If the set of - * fields contains {@link Field#LAST_EVENT_ONLY} and - * not {@link Field#EVENTS}, the most recent event - * for each entity is retrieved. If null, retrieves - * all fields. - * @return An {@link ATSEntity} object. - * @throws IOException - */ - ATSEntity getEntity(String entity, String entityType, EnumSet - fieldsToRetrieve) throws IOException; - - /** - * This method retrieves the events for a list of entities all of the same - * entity type. The events for each entity are sorted in order of their - * timestamps, descending. - * - * @param entityType The type of entities to retrieve events for. - * @param entityIds The entity IDs to retrieve events for. - * @param limit A limit on the number of events to return for each entity. - * If null, defaults to {@link #DEFAULT_LIMIT} events per - * entity. - * @param windowStart If not null, retrieves only events later than the - * given time (exclusive) - * @param windowEnd If not null, retrieves only events earlier than the - * given time (inclusive) - * @param eventTypes Restricts the events returned to the given types. If - * null, events of all types will be returned. - * @return An {@link ATSEvents} object. - * @throws IOException - */ - ATSEvents getEntityTimelines(String entityType, - SortedSet entityIds, Long limit, Long windowStart, - Long windowEnd, Set eventTypes) throws IOException; -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/ApplicationTimelineStore.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/ApplicationTimelineStore.java deleted file mode 100644 index b231418..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/ApplicationTimelineStore.java +++ /dev/null @@ -1,29 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.hadoop.yarn.server.applicationhistoryservice.apptimeline; - -import org.apache.hadoop.classification.InterfaceAudience; -import org.apache.hadoop.classification.InterfaceStability; -import org.apache.hadoop.service.Service; - -@InterfaceAudience.Private -@InterfaceStability.Unstable -public interface ApplicationTimelineStore extends - Service, ApplicationTimelineReader, ApplicationTimelineWriter { -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/ApplicationTimelineWriter.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/ApplicationTimelineWriter.java deleted file mode 100644 index 2a16833..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/ApplicationTimelineWriter.java +++ /dev/null @@ -1,46 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.yarn.server.applicationhistoryservice.apptimeline; - -import org.apache.hadoop.classification.InterfaceAudience; -import org.apache.hadoop.classification.InterfaceStability; -import org.apache.hadoop.yarn.api.records.apptimeline.ATSEntities; -import org.apache.hadoop.yarn.api.records.apptimeline.ATSPutErrors; - -import java.io.IOException; - -/** - * This interface is for storing application timeline information. - */ -@InterfaceAudience.Private -@InterfaceStability.Unstable -public interface ApplicationTimelineWriter { - - /** - * Stores entity information to the application timeline store. Any errors - * occurring for individual put request objects will be reported in the - * response. - * - * @param data An {@link ATSEntities} object. - * @return An {@link ATSPutErrors} object. 
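For contrast with the deleted writer interface, this is how the renamed types express the same put contract (a sketch under the assumption that the writer reappears as a timeline-store put returning TimelinePutResponse, matching the rename pattern elsewhere in this patch):

import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse;
import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse.TimelinePutError;

public class PutContractDemo {
  public static void main(String[] args) {
    // A store that cannot derive a start time reports it per entity:
    TimelinePutError error = new TimelinePutError();
    error.setEntityId("entity id");   // hypothetical values
    error.setEntityType("entity type");
    error.setErrorCode(TimelinePutError.NO_START_TIME);
    TimelinePutResponse response = new TimelinePutResponse();
    response.addError(error);
    // Callers inspect the response; individual entity failures do not
    // surface as exceptions from the put itself.
    System.out.println(response.getErrors().size()); // 1
  }
}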
- * @throws IOException - */ - ATSPutErrors put(ATSEntities data) throws IOException; - -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/EntityIdentifier.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/EntityIdentifier.java deleted file mode 100644 index d22e616..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/EntityIdentifier.java +++ /dev/null @@ -1,100 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.yarn.server.applicationhistoryservice.apptimeline; - -import org.apache.hadoop.classification.InterfaceAudience.Private; -import org.apache.hadoop.classification.InterfaceStability.Unstable; - -/** - * The unique identifier for an entity - */ -@Private -@Unstable -public class EntityIdentifier implements Comparable { - - private String id; - private String type; - - public EntityIdentifier(String id, String type) { - this.id = id; - this.type = type; - } - - /** - * Get the entity Id. - * @return The entity Id. - */ - public String getId() { - return id; - } - - /** - * Get the entity type. - * @return The entity type. - */ - public String getType() { - return type; - } - - @Override - public int compareTo(EntityIdentifier other) { - int c = type.compareTo(other.type); - if (c != 0) return c; - return id.compareTo(other.id); - } - - @Override - public int hashCode() { - // generated by eclipse - final int prime = 31; - int result = 1; - result = prime * result + ((id == null) ? 0 : id.hashCode()); - result = prime * result + ((type == null) ? 
0 : type.hashCode()); - return result; - } - - @Override - public boolean equals(Object obj) { - // generated by eclipse - if (this == obj) - return true; - if (obj == null) - return false; - if (getClass() != obj.getClass()) - return false; - EntityIdentifier other = (EntityIdentifier) obj; - if (id == null) { - if (other.id != null) - return false; - } else if (!id.equals(other.id)) - return false; - if (type == null) { - if (other.type != null) - return false; - } else if (!type.equals(other.type)) - return false; - return true; - } - - @Override - public String toString() { - return "{ id: " + id + ", type: "+ type + " }"; - } - -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/GenericObjectMapper.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/GenericObjectMapper.java deleted file mode 100644 index 3281a32..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/GenericObjectMapper.java +++ /dev/null @@ -1,222 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.yarn.server.applicationhistoryservice.apptimeline; - -import java.io.ByteArrayInputStream; -import java.io.ByteArrayOutputStream; -import java.io.DataInputStream; -import java.io.DataOutputStream; -import java.io.IOException; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import org.apache.hadoop.classification.InterfaceAudience; -import org.apache.hadoop.classification.InterfaceStability; -import org.apache.hadoop.io.WritableUtils; -import org.codehaus.jackson.map.ObjectMapper; - -/** - * A utility class providing methods for serializing and deserializing - * objects. The {@link #write(Object)}, {@link #read(byte[])} and {@link - * #write(java.io.DataOutputStream, Object)}, {@link - * #read(java.io.DataInputStream)} methods are used by the - * {@link LeveldbApplicationTimelineStore} to store and retrieve arbitrary - * JSON, while the {@link #writeReverseOrderedLong} and {@link - * #readReverseOrderedLong} methods are used to sort entities in descending - * start time order. 
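The descending sort relies on the reverse-ordered long encoding described above; here is a standalone sketch of that property (the helper body is copied from this file, so no timeline classes are needed):

public class ReverseOrderedLongDemo {
  // Copied from GenericObjectMapper.writeReverseOrderedLong: flips the sign
  // bit and inverts the remaining bits so bigger longs encode smaller bytes.
  static byte[] writeReverseOrderedLong(long l) {
    byte[] b = new byte[8];
    b[0] = (byte) (0x7f ^ ((l >> 56) & 0xff));
    for (int i = 1; i < 7; i++) {
      b[i] = (byte) (0xff ^ ((l >> 8 * (7 - i)) & 0xff));
    }
    b[7] = (byte) (0xff ^ (l & 0xff));
    return b;
  }

  // Unsigned lexicographic comparison, as a byte-ordered store would do.
  static int compareBytes(byte[] a, byte[] b) {
    for (int i = 0; i < 8; i++) {
      int d = (a[i] & 0xff) - (b[i] & 0xff);
      if (d != 0) {
        return d;
      }
    }
    return 0;
  }

  public static void main(String[] args) {
    long earlier = 1000L, later = 2000L;
    // The later timestamp encodes lexicographically smaller, so it sorts
    // first, i.e. entities come back in descending start-time order.
    System.out.println(compareBytes(writeReverseOrderedLong(later),
        writeReverseOrderedLong(earlier)) < 0); // true
  }
}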
- */ -@InterfaceAudience.Private -@InterfaceStability.Unstable -public class GenericObjectMapper { - private static final byte[] EMPTY_BYTES = new byte[0]; - - private static final byte LONG = 0x1; - private static final byte INTEGER = 0x2; - private static final byte DOUBLE = 0x3; - private static final byte STRING = 0x4; - private static final byte BOOLEAN = 0x5; - private static final byte LIST = 0x6; - private static final byte MAP = 0x7; - - /** - * Serializes an Object into a byte array. Along with {@link #read(byte[]) }, - * can be used to serialize an Object and deserialize it into an Object of - * the same type without needing to specify the Object's type, - * as long as it is one of the JSON-compatible objects Long, Integer, - * Double, String, Boolean, List, or Map. The current implementation uses - * ObjectMapper to serialize complex objects (List and Map) while using - * Writable to serialize simpler objects, to produce fewer bytes. - * - * @param o An Object - * @return A byte array representation of the Object - * @throws IOException - */ - public static byte[] write(Object o) throws IOException { - if (o == null) - return EMPTY_BYTES; - ByteArrayOutputStream baos = new ByteArrayOutputStream(); - write(new DataOutputStream(baos), o); - return baos.toByteArray(); - } - - /** - * Serializes an Object and writes it to a DataOutputStream. Along with - * {@link #read(java.io.DataInputStream)}, can be used to serialize an Object - * and deserialize it into an Object of the same type without needing to - * specify the Object's type, as long as it is one of the JSON-compatible - * objects Long, Integer, Double, String, Boolean, List, or Map. The current - * implementation uses ObjectMapper to serialize complex objects (List and - * Map) while using Writable to serialize simpler objects, to produce fewer - * bytes. - * - * @param dos A DataOutputStream - * @param o An Object - * @throws IOException - */ - public static void write(DataOutputStream dos, Object o) - throws IOException { - if (o == null) - return; - if (o instanceof Long) { - dos.write(LONG); - WritableUtils.writeVLong(dos, (Long) o); - } else if(o instanceof Integer) { - dos.write(INTEGER); - WritableUtils.writeVInt(dos, (Integer) o); - } else if(o instanceof Double) { - dos.write(DOUBLE); - dos.writeDouble((Double) o); - } else if (o instanceof String) { - dos.write(STRING); - WritableUtils.writeString(dos, (String) o); - } else if (o instanceof Boolean) { - dos.write(BOOLEAN); - dos.writeBoolean((Boolean) o); - } else if (o instanceof List) { - dos.write(LIST); - ObjectMapper mapper = new ObjectMapper(); - mapper.writeValue(dos, o); - } else if (o instanceof Map) { - dos.write(MAP); - ObjectMapper mapper = new ObjectMapper(); - mapper.writeValue(dos, o); - } else { - throw new IOException("Couldn't serialize object"); - } - } - - /** - * Deserializes an Object from a byte array created with - * {@link #write(Object)}. - * - * @param b A byte array - * @return An Object - * @throws IOException - */ - public static Object read(byte[] b) throws IOException { - return read(b, 0); - } - - /** - * Deserializes an Object from a byte array at a specified offset, assuming - * the bytes were created with {@link #write(Object)}. 
- * - * @param b A byte array - * @param offset Offset into the array - * @return An Object - * @throws IOException - */ - public static Object read(byte[] b, int offset) throws IOException { - if (b == null || b.length == 0) { - return null; - } - ByteArrayInputStream bais = new ByteArrayInputStream(b, offset, - b.length - offset); - return read(new DataInputStream(bais)); - } - - /** - * Reads an Object from a DataInputStream whose data has been written with - * {@link #write(java.io.DataOutputStream, Object)}. - * - * @param dis A DataInputStream - * @return An Object, null if an unrecognized type - * @throws IOException - */ - public static Object read(DataInputStream dis) throws IOException { - byte code = (byte)dis.read(); - ObjectMapper mapper; - switch (code) { - case LONG: - return WritableUtils.readVLong(dis); - case INTEGER: - return WritableUtils.readVInt(dis); - case DOUBLE: - return dis.readDouble(); - case STRING: - return WritableUtils.readString(dis); - case BOOLEAN: - return dis.readBoolean(); - case LIST: - mapper = new ObjectMapper(); - return mapper.readValue(dis, ArrayList.class); - case MAP: - mapper = new ObjectMapper(); - return mapper.readValue(dis, HashMap.class); - default: - return null; - } - } - - /** - * Converts a long to a 8-byte array so that lexicographic ordering of the - * produced byte arrays sort the longs in descending order. - * - * @param l A long - * @return A byte array - */ - public static byte[] writeReverseOrderedLong(long l) { - byte[] b = new byte[8]; - b[0] = (byte)(0x7f ^ ((l >> 56) & 0xff)); - for (int i = 1; i < 7; i++) - b[i] = (byte)(0xff ^ ((l >> 8*(7-i)) & 0xff)); - b[7] = (byte)(0xff ^ (l & 0xff)); - return b; - } - - /** - * Reads 8 bytes from an array starting at the specified offset and - * converts them to a long. The bytes are assumed to have been created - * with {@link #writeReverseOrderedLong}. - * - * @param b A byte array - * @param offset An offset into the byte array - * @return A long - */ - public static long readReverseOrderedLong(byte[] b, int offset) { - long l = b[offset] & 0xff; - for (int i = 1; i < 8; i++) { - l = l << 8; - l = l | (b[offset+i]&0xff); - } - return l ^ 0x7fffffffffffffffl; - } - -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/LeveldbApplicationTimelineStore.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/LeveldbApplicationTimelineStore.java deleted file mode 100644 index 7f4d838..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/LeveldbApplicationTimelineStore.java +++ /dev/null @@ -1,875 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.yarn.server.applicationhistoryservice.apptimeline; - -import java.io.ByteArrayOutputStream; -import java.io.File; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.Comparator; -import java.util.EnumSet; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Set; -import java.util.SortedSet; -import java.util.TreeMap; - -import com.google.common.annotations.VisibleForTesting; -import org.apache.commons.collections.map.LRUMap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.classification.InterfaceAudience; -import org.apache.hadoop.classification.InterfaceStability; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.io.IOUtils; -import org.apache.hadoop.io.WritableComparator; -import org.apache.hadoop.service.AbstractService; -import org.apache.hadoop.yarn.api.records.apptimeline.ATSEntities; -import org.apache.hadoop.yarn.api.records.apptimeline.ATSEntity; -import org.apache.hadoop.yarn.api.records.apptimeline.ATSEvent; -import org.apache.hadoop.yarn.api.records.apptimeline.ATSEvents; -import org.apache.hadoop.yarn.api.records.apptimeline.ATSEvents.ATSEventsOfOneEntity; -import org.apache.hadoop.yarn.api.records.apptimeline.ATSPutErrors; -import org.apache.hadoop.yarn.api.records.apptimeline.ATSPutErrors.ATSPutError; -import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.fusesource.leveldbjni.JniDBFactory; -import org.iq80.leveldb.DB; -import org.iq80.leveldb.DBIterator; -import org.iq80.leveldb.Options; -import org.iq80.leveldb.WriteBatch; - -import static org.apache.hadoop.yarn.server.applicationhistoryservice - .apptimeline.GenericObjectMapper.readReverseOrderedLong; -import static org.apache.hadoop.yarn.server.applicationhistoryservice - .apptimeline.GenericObjectMapper.writeReverseOrderedLong; - -/** - * An implementation of an application timeline store backed by leveldb. 
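Illustrative sketch (not part of the patch): the store below repeatedly uses one leveldb idiom, seek the iterator to a key prefix and walk forward until the prefix stops matching. A distilled version, using the same org.iq80.leveldb and WritableComparator calls the class itself uses; the helper name scanPrefix is hypothetical:

```java
import java.io.IOException;

import org.apache.hadoop.io.WritableComparator;
import org.iq80.leveldb.DB;
import org.iq80.leveldb.DBIterator;

public class PrefixScanSketch {
  // Walk every entry whose key begins with the given prefix.
  static void scanPrefix(DB db, byte[] prefix) throws IOException {
    DBIterator iterator = db.iterator();
    try {
      for (iterator.seek(prefix); iterator.hasNext(); iterator.next()) {
        byte[] key = iterator.peekNext().getKey();
        if (key.length < prefix.length ||
            WritableComparator.compareBytes(prefix, 0, prefix.length,
                key, 0, prefix.length) != 0) {
          break; // walked past the last key with this prefix
        }
        // ... parse the current key and value here ...
      }
    } finally {
      iterator.close();
    }
  }
}
```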
- */ -@InterfaceAudience.Private -@InterfaceStability.Unstable -public class LeveldbApplicationTimelineStore extends AbstractService - implements ApplicationTimelineStore { - private static final Log LOG = LogFactory - .getLog(LeveldbApplicationTimelineStore.class); - - private static final String FILENAME = "leveldb-apptimeline-store.ldb"; - - private static final byte[] START_TIME_LOOKUP_PREFIX = "k".getBytes(); - private static final byte[] ENTITY_ENTRY_PREFIX = "e".getBytes(); - private static final byte[] INDEXED_ENTRY_PREFIX = "i".getBytes(); - - private static final byte[] PRIMARY_FILTER_COLUMN = "f".getBytes(); - private static final byte[] OTHER_INFO_COLUMN = "i".getBytes(); - private static final byte[] RELATED_COLUMN = "r".getBytes(); - private static final byte[] TIME_COLUMN = "t".getBytes(); - - private static final byte[] EMPTY_BYTES = new byte[0]; - - private static final int START_TIME_CACHE_SIZE = 10000; - - @SuppressWarnings("unchecked") - private final Map startTimeCache = - Collections.synchronizedMap(new LRUMap(START_TIME_CACHE_SIZE)); - - private DB db; - - public LeveldbApplicationTimelineStore() { - super(LeveldbApplicationTimelineStore.class.getName()); - } - - @Override - protected void serviceInit(Configuration conf) throws Exception { - Options options = new Options(); - options.createIfMissing(true); - JniDBFactory factory = new JniDBFactory(); - String path = conf.get(YarnConfiguration.ATS_LEVELDB_PATH_PROPERTY); - File p = new File(path); - if (!p.exists()) - if (!p.mkdirs()) - throw new IOException("Couldn't create directory for leveldb " + - "application timeline store " + path); - LOG.info("Using leveldb path " + path); - db = factory.open(new File(path, FILENAME), options); - super.serviceInit(conf); - } - - @Override - protected void serviceStop() throws Exception { - IOUtils.cleanup(LOG, db); - super.serviceStop(); - } - - private static class KeyBuilder { - private static final int MAX_NUMBER_OF_KEY_ELEMENTS = 10; - private byte[][] b; - private boolean[] useSeparator; - private int index; - private int length; - - public KeyBuilder(int size) { - b = new byte[size][]; - useSeparator = new boolean[size]; - index = 0; - length = 0; - } - - public static KeyBuilder newInstance() { - return new KeyBuilder(MAX_NUMBER_OF_KEY_ELEMENTS); - } - - public KeyBuilder add(String s) { - return add(s.getBytes(), true); - } - - public KeyBuilder add(byte[] t) { - return add(t, false); - } - - public KeyBuilder add(byte[] t, boolean sep) { - b[index] = t; - useSeparator[index] = sep; - length += t.length; - if (sep) - length++; - index++; - return this; - } - - public byte[] getBytes() throws IOException { - ByteArrayOutputStream baos = new ByteArrayOutputStream(length); - for (int i = 0; i < index; i++) { - baos.write(b[i]); - if (i < index-1 && useSeparator[i]) - baos.write(0x0); - } - return baos.toByteArray(); - } - - public byte[] getBytesForLookup() throws IOException { - ByteArrayOutputStream baos = new ByteArrayOutputStream(length); - for (int i = 0; i < index; i++) { - baos.write(b[i]); - if (useSeparator[i]) - baos.write(0x0); - } - return baos.toByteArray(); - } - } - - private static class KeyParser { - private final byte[] b; - private int offset; - - public KeyParser(byte[] b, int offset) { - this.b = b; - this.offset = offset; - } - - public String getNextString() throws IOException { - if (offset >= b.length) - throw new IOException( - "tried to read nonexistent string from byte array"); - int i = 0; - while (offset+i < b.length && b[offset+i] != 0x0) - 
i++; - String s = new String(b, offset, i); - offset = offset + i + 1; - return s; - } - - public long getNextLong() throws IOException { - if (offset+8 >= b.length) - throw new IOException("byte array ran out when trying to read long"); - long l = readReverseOrderedLong(b, offset); - offset += 8; - return l; - } - - public int getOffset() { - return offset; - } - } - - @Override - public ATSEntity getEntity(String entity, String entityType, - EnumSet fields) throws IOException { - DBIterator iterator = null; - try { - byte[] revStartTime = getStartTime(entity, entityType, null, null, null); - if (revStartTime == null) - return null; - byte[] prefix = KeyBuilder.newInstance().add(ENTITY_ENTRY_PREFIX) - .add(entityType).add(revStartTime).add(entity).getBytesForLookup(); - - iterator = db.iterator(); - iterator.seek(prefix); - - return getEntity(entity, entityType, - readReverseOrderedLong(revStartTime, 0), fields, iterator, prefix, - prefix.length); - } finally { - IOUtils.cleanup(LOG, iterator); - } - } - - /** - * Read entity from a db iterator. If no information is found in the - * specified fields for this entity, return null. - */ - private static ATSEntity getEntity(String entity, String entityType, - Long startTime, EnumSet fields, DBIterator iterator, - byte[] prefix, int prefixlen) throws IOException { - if (fields == null) - fields = EnumSet.allOf(Field.class); - - ATSEntity atsEntity = new ATSEntity(); - boolean events = false; - boolean lastEvent = false; - if (fields.contains(Field.EVENTS)) { - events = true; - atsEntity.setEvents(new ArrayList()); - } else if (fields.contains(Field.LAST_EVENT_ONLY)) { - lastEvent = true; - atsEntity.setEvents(new ArrayList()); - } - else { - atsEntity.setEvents(null); - } - boolean relatedEntities = false; - if (fields.contains(Field.RELATED_ENTITIES)) { - relatedEntities = true; - } else { - atsEntity.setRelatedEntities(null); - } - boolean primaryFilters = false; - if (fields.contains(Field.PRIMARY_FILTERS)) { - primaryFilters = true; - } else { - atsEntity.setPrimaryFilters(null); - } - boolean otherInfo = false; - if (fields.contains(Field.OTHER_INFO)) { - otherInfo = true; - atsEntity.setOtherInfo(new HashMap()); - } else { - atsEntity.setOtherInfo(null); - } - - // iterate through the entity's entry, parsing information if it is part - // of a requested field - for (; iterator.hasNext(); iterator.next()) { - byte[] key = iterator.peekNext().getKey(); - if (!prefixMatches(prefix, prefixlen, key)) - break; - if (key[prefixlen] == PRIMARY_FILTER_COLUMN[0]) { - if (primaryFilters) { - addPrimaryFilter(atsEntity, key, - prefixlen + PRIMARY_FILTER_COLUMN.length); - } - } else if (key[prefixlen] == OTHER_INFO_COLUMN[0]) { - if (otherInfo) { - atsEntity.addOtherInfo(parseRemainingKey(key, - prefixlen + OTHER_INFO_COLUMN.length), - GenericObjectMapper.read(iterator.peekNext().getValue())); - } - } else if (key[prefixlen] == RELATED_COLUMN[0]) { - if (relatedEntities) { - addRelatedEntity(atsEntity, key, - prefixlen + RELATED_COLUMN.length); - } - } else if (key[prefixlen] == TIME_COLUMN[0]) { - if (events || (lastEvent && atsEntity.getEvents().size() == 0)) { - ATSEvent event = getEntityEvent(null, key, prefixlen + - TIME_COLUMN.length, iterator.peekNext().getValue()); - if (event != null) { - atsEntity.addEvent(event); - } - } - } else { - LOG.warn(String.format("Found unexpected column for entity %s of " + - "type %s (0x%02x)", entity, entityType, key[prefixlen])); - } - } - - atsEntity.setEntityId(entity); - 
atsEntity.setEntityType(entityType); - atsEntity.setStartTime(startTime); - - return atsEntity; - } - - @Override - public ATSEvents getEntityTimelines(String entityType, - SortedSet entityIds, Long limit, Long windowStart, - Long windowEnd, Set eventType) throws IOException { - ATSEvents atsEvents = new ATSEvents(); - if (entityIds == null || entityIds.isEmpty()) - return atsEvents; - // create a lexicographically-ordered map from start time to entities - Map> startTimeMap = new TreeMap>(new Comparator() { - @Override - public int compare(byte[] o1, byte[] o2) { - return WritableComparator.compareBytes(o1, 0, o1.length, o2, 0, - o2.length); - } - }); - DBIterator iterator = null; - try { - // look up start times for the specified entities - // skip entities with no start time - for (String entity : entityIds) { - byte[] startTime = getStartTime(entity, entityType, null, null, null); - if (startTime != null) { - List entities = startTimeMap.get(startTime); - if (entities == null) { - entities = new ArrayList(); - startTimeMap.put(startTime, entities); - } - entities.add(new EntityIdentifier(entity, entityType)); - } - } - for (Entry> entry : - startTimeMap.entrySet()) { - // look up the events matching the given parameters (limit, - // start time, end time, event types) for entities whose start times - // were found and add the entities to the return list - byte[] revStartTime = entry.getKey(); - for (EntityIdentifier entity : entry.getValue()) { - ATSEventsOfOneEntity atsEntity = new ATSEventsOfOneEntity(); - atsEntity.setEntityId(entity.getId()); - atsEntity.setEntityType(entityType); - atsEvents.addEvent(atsEntity); - KeyBuilder kb = KeyBuilder.newInstance().add(ENTITY_ENTRY_PREFIX) - .add(entityType).add(revStartTime).add(entity.getId()) - .add(TIME_COLUMN); - byte[] prefix = kb.getBytesForLookup(); - if (windowEnd == null) { - windowEnd = Long.MAX_VALUE; - } - byte[] revts = writeReverseOrderedLong(windowEnd); - kb.add(revts); - byte[] first = kb.getBytesForLookup(); - byte[] last = null; - if (windowStart != null) { - last = KeyBuilder.newInstance().add(prefix) - .add(writeReverseOrderedLong(windowStart)).getBytesForLookup(); - } - if (limit == null) { - limit = DEFAULT_LIMIT; - } - iterator = db.iterator(); - for (iterator.seek(first); atsEntity.getEvents().size() < limit && - iterator.hasNext(); iterator.next()) { - byte[] key = iterator.peekNext().getKey(); - if (!prefixMatches(prefix, prefix.length, key) || (last != null && - WritableComparator.compareBytes(key, 0, key.length, last, 0, - last.length) > 0)) - break; - ATSEvent event = getEntityEvent(eventType, key, prefix.length, - iterator.peekNext().getValue()); - if (event != null) - atsEntity.addEvent(event); - } - } - } - } finally { - IOUtils.cleanup(LOG, iterator); - } - return atsEvents; - } - - /** - * Returns true if the byte array begins with the specified prefix. 
- */ - private static boolean prefixMatches(byte[] prefix, int prefixlen, - byte[] b) { - if (b.length < prefixlen) - return false; - return WritableComparator.compareBytes(prefix, 0, prefixlen, b, 0, - prefixlen) == 0; - } - - @Override - public ATSEntities getEntities(String entityType, - Long limit, Long windowStart, Long windowEnd, - NameValuePair primaryFilter, Collection secondaryFilters, - EnumSet fields) throws IOException { - if (primaryFilter == null) { - // if no primary filter is specified, prefix the lookup with - // ENTITY_ENTRY_PREFIX - return getEntityByTime(ENTITY_ENTRY_PREFIX, entityType, limit, - windowStart, windowEnd, secondaryFilters, fields); - } else { - // if a primary filter is specified, prefix the lookup with - // INDEXED_ENTRY_PREFIX + primaryFilterName + primaryFilterValue + - // ENTITY_ENTRY_PREFIX - byte[] base = KeyBuilder.newInstance().add(INDEXED_ENTRY_PREFIX) - .add(primaryFilter.getName()) - .add(GenericObjectMapper.write(primaryFilter.getValue()), true) - .add(ENTITY_ENTRY_PREFIX).getBytesForLookup(); - return getEntityByTime(base, entityType, limit, windowStart, windowEnd, - secondaryFilters, fields); - } - } - - /** - * Retrieves a list of entities satisfying given parameters. - * - * @param base A byte array prefix for the lookup - * @param entityType The type of the entity - * @param limit A limit on the number of entities to return - * @param starttime The earliest entity start time to retrieve (exclusive) - * @param endtime The latest entity start time to retrieve (inclusive) - * @param secondaryFilters Filter pairs that the entities should match - * @param fields The set of fields to retrieve - * @return A list of entities - * @throws IOException - */ - private ATSEntities getEntityByTime(byte[] base, - String entityType, Long limit, Long starttime, Long endtime, - Collection secondaryFilters, EnumSet fields) - throws IOException { - DBIterator iterator = null; - try { - KeyBuilder kb = KeyBuilder.newInstance().add(base).add(entityType); - // only db keys matching the prefix (base + entity type) will be parsed - byte[] prefix = kb.getBytesForLookup(); - if (endtime == null) { - // if end time is null, place no restriction on end time - endtime = Long.MAX_VALUE; - } - // using end time, construct a first key that will be seeked to - byte[] revts = writeReverseOrderedLong(endtime); - kb.add(revts); - byte[] first = kb.getBytesForLookup(); - byte[] last = null; - if (starttime != null) { - // if start time is not null, set a last key that will not be - // iterated past - last = KeyBuilder.newInstance().add(base).add(entityType) - .add(writeReverseOrderedLong(starttime)).getBytesForLookup(); - } - if (limit == null) { - // if limit is not specified, use the default - limit = DEFAULT_LIMIT; - } - - ATSEntities atsEntities = new ATSEntities(); - iterator = db.iterator(); - iterator.seek(first); - // iterate until one of the following conditions is met: limit is - // reached, there are no more keys, the key prefix no longer matches, - // or a start time has been specified and reached/exceeded - while (atsEntities.getEntities().size() < limit && iterator.hasNext()) { - byte[] key = iterator.peekNext().getKey(); - if (!prefixMatches(prefix, prefix.length, key) || (last != null && - WritableComparator.compareBytes(key, 0, key.length, last, 0, - last.length) > 0)) - break; - // read the start time and entity from the current key - KeyParser kp = new KeyParser(key, prefix.length); - Long startTime = kp.getNextLong(); - String entity = kp.getNextString(); - 
// parse the entity that owns this key, iterating over all keys for - // the entity - ATSEntity atsEntity = getEntity(entity, entityType, startTime, - fields, iterator, key, kp.getOffset()); - if (atsEntity == null) - continue; - // determine if the retrieved entity matches the provided secondary - // filters, and if so add it to the list of entities to return - boolean filterPassed = true; - if (secondaryFilters != null) { - for (NameValuePair filter : secondaryFilters) { - Object v = atsEntity.getOtherInfo().get(filter.getName()); - if (v == null) { - Set vs = atsEntity.getPrimaryFilters() - .get(filter.getName()); - if (vs != null && !vs.contains(filter.getValue())) { - filterPassed = false; - break; - } - } else if (!v.equals(filter.getValue())) { - filterPassed = false; - break; - } - } - } - if (filterPassed) - atsEntities.addEntity(atsEntity); - } - return atsEntities; - } finally { - IOUtils.cleanup(LOG, iterator); - } - } - - /** - * Put a single entity. If there is an error, add a PutError to the given - * response. - */ - private void put(ATSEntity atsEntity, ATSPutErrors response) { - WriteBatch writeBatch = null; - try { - writeBatch = db.createWriteBatch(); - List events = atsEntity.getEvents(); - // look up the start time for the entity - byte[] revStartTime = getStartTime(atsEntity.getEntityId(), - atsEntity.getEntityType(), atsEntity.getStartTime(), events, - writeBatch); - if (revStartTime == null) { - // if no start time is found, add an error and return - ATSPutError error = new ATSPutError(); - error.setEntityId(atsEntity.getEntityId()); - error.setEntityType(atsEntity.getEntityType()); - error.setErrorCode(ATSPutError.NO_START_TIME); - response.addError(error); - return; - } - Long revStartTimeLong = readReverseOrderedLong(revStartTime, 0); - Map> primaryFilters = atsEntity.getPrimaryFilters(); - - // write event entries - if (events != null && !events.isEmpty()) { - for (ATSEvent event : events) { - byte[] revts = writeReverseOrderedLong(event.getTimestamp()); - byte[] key = createEntityEventKey(atsEntity.getEntityId(), - atsEntity.getEntityType(), revStartTime, revts, - event.getEventType()); - byte[] value = GenericObjectMapper.write(event.getEventInfo()); - writeBatch.put(key, value); - writePrimaryFilterEntries(writeBatch, primaryFilters, key, value); - } - } - - // write related entity entries - Map> relatedEntities = - atsEntity.getRelatedEntities(); - if (relatedEntities != null && !relatedEntities.isEmpty()) { - for (Entry> relatedEntityList : - relatedEntities.entrySet()) { - String relatedEntityType = relatedEntityList.getKey(); - for (String relatedEntityId : relatedEntityList.getValue()) { - // look up start time of related entity - byte[] relatedEntityStartTime = getStartTime(relatedEntityId, - relatedEntityType, null, null, writeBatch); - if (relatedEntityStartTime == null) { - // if start time is not found, set start time of the related - // entity to the start time of this entity, and write it to the - // db and the cache - relatedEntityStartTime = revStartTime; - writeBatch.put(createStartTimeLookupKey(relatedEntityId, - relatedEntityType), relatedEntityStartTime); - startTimeCache.put(new EntityIdentifier(relatedEntityId, - relatedEntityType), revStartTimeLong); - } - // write reverse entry (related entity -> entity) - byte[] key = createReleatedEntityKey(relatedEntityId, - relatedEntityType, relatedEntityStartTime, - atsEntity.getEntityId(), atsEntity.getEntityType()); - writeBatch.put(key, EMPTY_BYTES); - // TODO: write forward entry (entity -> 
related entity)? - } - } - } - - // write primary filter entries - if (primaryFilters != null && !primaryFilters.isEmpty()) { - for (Entry> primaryFilter : - primaryFilters.entrySet()) { - for (Object primaryFilterValue : primaryFilter.getValue()) { - byte[] key = createPrimaryFilterKey(atsEntity.getEntityId(), - atsEntity.getEntityType(), revStartTime, - primaryFilter.getKey(), primaryFilterValue); - writeBatch.put(key, EMPTY_BYTES); - writePrimaryFilterEntries(writeBatch, primaryFilters, key, - EMPTY_BYTES); - } - } - } - - // write other info entries - Map otherInfo = atsEntity.getOtherInfo(); - if (otherInfo != null && !otherInfo.isEmpty()) { - for (Entry i : otherInfo.entrySet()) { - byte[] key = createOtherInfoKey(atsEntity.getEntityId(), - atsEntity.getEntityType(), revStartTime, i.getKey()); - byte[] value = GenericObjectMapper.write(i.getValue()); - writeBatch.put(key, value); - writePrimaryFilterEntries(writeBatch, primaryFilters, key, value); - } - } - db.write(writeBatch); - } catch (IOException e) { - LOG.error("Error putting entity " + atsEntity.getEntityId() + - " of type " + atsEntity.getEntityType(), e); - ATSPutError error = new ATSPutError(); - error.setEntityId(atsEntity.getEntityId()); - error.setEntityType(atsEntity.getEntityType()); - error.setErrorCode(ATSPutError.IO_EXCEPTION); - response.addError(error); - } finally { - IOUtils.cleanup(LOG, writeBatch); - } - } - - /** - * For a given key / value pair that has been written to the db, - * write additional entries to the db for each primary filter. - */ - private static void writePrimaryFilterEntries(WriteBatch writeBatch, - Map> primaryFilters, byte[] key, byte[] value) - throws IOException { - if (primaryFilters != null && !primaryFilters.isEmpty()) { - for (Entry> pf : primaryFilters.entrySet()) { - for (Object pfval : pf.getValue()) { - writeBatch.put(addPrimaryFilterToKey(pf.getKey(), pfval, - key), value); - } - } - } - } - - @Override - public ATSPutErrors put(ATSEntities atsEntities) { - ATSPutErrors response = new ATSPutErrors(); - for (ATSEntity atsEntity : atsEntities.getEntities()) { - put(atsEntity, response); - } - return response; - } - - /** - * Get the unique start time for a given entity as a byte array that sorts - * the timestamps in reverse order (see {@link - * GenericObjectMapper#writeReverseOrderedLong(long)}). 
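Illustrative sketch (not part of the patch): the reverse-ordered encoding referenced above makes larger timestamps sort earlier under leveldb's unsigned lexicographic key order, so a forward scan returns the newest entities first. A quick property check, assuming GenericObjectMapper is importable:

```java
import org.apache.hadoop.io.WritableComparator;

public class ReverseOrderDemo {
  public static void main(String[] args) {
    byte[] newer = GenericObjectMapper.writeReverseOrderedLong(2000L);
    byte[] older = GenericObjectMapper.writeReverseOrderedLong(1000L);
    int cmp = WritableComparator.compareBytes(newer, 0, newer.length,
        older, 0, older.length);
    System.out.println(cmp < 0);  // true: 2000 sorts before 1000
    // The encoding is reversible:
    System.out.println(GenericObjectMapper.readReverseOrderedLong(newer, 0)); // 2000
  }
}
```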
- * - * @param entityId The id of the entity - * @param entityType The type of the entity - * @param startTime The start time of the entity, or null - * @param events A list of events for the entity, or null - * @param writeBatch A leveldb write batch, if the method is called by a - * put as opposed to a get - * @return A byte array - * @throws IOException - */ - private byte[] getStartTime(String entityId, String entityType, - Long startTime, List events, WriteBatch writeBatch) - throws IOException { - EntityIdentifier entity = new EntityIdentifier(entityId, entityType); - if (startTime == null) { - // start time is not provided, so try to look it up - if (startTimeCache.containsKey(entity)) { - // found the start time in the cache - startTime = startTimeCache.get(entity); - } else { - // try to look up the start time in the db - byte[] b = createStartTimeLookupKey(entity.getId(), entity.getType()); - byte[] v = db.get(b); - if (v == null) { - // did not find the start time in the db - // if this is a put, try to set it from the provided events - if (events == null || writeBatch == null) { - // no events, or not a put, so return null - return null; - } - Long min = Long.MAX_VALUE; - for (ATSEvent e : events) - if (min > e.getTimestamp()) - min = e.getTimestamp(); - startTime = min; - // selected start time as minimum timestamp of provided events - // write start time to db and cache - writeBatch.put(b, writeReverseOrderedLong(startTime)); - startTimeCache.put(entity, startTime); - } else { - // found the start time in the db - startTime = readReverseOrderedLong(v, 0); - if (writeBatch != null) { - // if this is a put, re-add the start time to the cache - startTimeCache.put(entity, startTime); - } - } - } - } else { - // start time is provided - // TODO: verify start time in db as well as cache? - if (startTimeCache.containsKey(entity)) { - // if the start time is already in the cache, - // and it is different from the provided start time, - // use the one from the cache - if (!startTime.equals(startTimeCache.get(entity))) - startTime = startTimeCache.get(entity); - } else if (writeBatch != null) { - // if this is a put, write the provided start time to the db and the - // cache - byte[] b = createStartTimeLookupKey(entity.getId(), entity.getType()); - writeBatch.put(b, writeReverseOrderedLong(startTime)); - startTimeCache.put(entity, startTime); - } - } - return writeReverseOrderedLong(startTime); - } - - /** - * Creates a key for looking up the start time of a given entity, - * of the form START_TIME_LOOKUP_PREFIX + entitytype + entity. - */ - private static byte[] createStartTimeLookupKey(String entity, - String entitytype) throws IOException { - return KeyBuilder.newInstance().add(START_TIME_LOOKUP_PREFIX) - .add(entitytype).add(entity).getBytes(); - } - - /** - * Creates an index entry for the given key of the form - * INDEXED_ENTRY_PREFIX + primaryfiltername + primaryfiltervalue + key. - */ - private static byte[] addPrimaryFilterToKey(String primaryFilterName, - Object primaryFilterValue, byte[] key) throws IOException { - return KeyBuilder.newInstance().add(INDEXED_ENTRY_PREFIX) - .add(primaryFilterName) - .add(GenericObjectMapper.write(primaryFilterValue), true).add(key) - .getBytes(); - } - - /** - * Creates an event key, serializing ENTITY_ENTRY_PREFIX + entitytype + - * revstarttime + entity + TIME_COLUMN + reveventtimestamp + eventtype. 
- */ - private static byte[] createEntityEventKey(String entity, String entitytype, - byte[] revStartTime, byte[] reveventtimestamp, String eventtype) - throws IOException { - return KeyBuilder.newInstance().add(ENTITY_ENTRY_PREFIX) - .add(entitytype).add(revStartTime).add(entity).add(TIME_COLUMN) - .add(reveventtimestamp).add(eventtype).getBytes(); - } - - /** - * Creates an event object from the given key, offset, and value. If the - * event type is not contained in the specified set of event types, - * returns null. - */ - private static ATSEvent getEntityEvent(Set eventTypes, byte[] key, - int offset, byte[] value) throws IOException { - KeyParser kp = new KeyParser(key, offset); - long ts = kp.getNextLong(); - String tstype = kp.getNextString(); - if (eventTypes == null || eventTypes.contains(tstype)) { - ATSEvent event = new ATSEvent(); - event.setTimestamp(ts); - event.setEventType(tstype); - Object o = GenericObjectMapper.read(value); - if (o == null) { - event.setEventInfo(null); - } else if (o instanceof Map) { - @SuppressWarnings("unchecked") - Map m = (Map) o; - event.setEventInfo(m); - } else { - throw new IOException("Couldn't deserialize event info map"); - } - return event; - } - return null; - } - - /** - * Creates a primary filter key, serializing ENTITY_ENTRY_PREFIX + - * entitytype + revstarttime + entity + PRIMARY_FILTER_COLUMN + name + value. - */ - private static byte[] createPrimaryFilterKey(String entity, - String entitytype, byte[] revStartTime, String name, Object value) - throws IOException { - return KeyBuilder.newInstance().add(ENTITY_ENTRY_PREFIX).add(entitytype) - .add(revStartTime).add(entity).add(PRIMARY_FILTER_COLUMN).add(name) - .add(GenericObjectMapper.write(value)).getBytes(); - } - - /** - * Parses the primary filter from the given key at the given offset and - * adds it to the given entity. - */ - private static void addPrimaryFilter(ATSEntity atsEntity, byte[] key, - int offset) throws IOException { - KeyParser kp = new KeyParser(key, offset); - String name = kp.getNextString(); - Object value = GenericObjectMapper.read(key, kp.getOffset()); - atsEntity.addPrimaryFilter(name, value); - } - - /** - * Creates an other info key, serializing ENTITY_ENTRY_PREFIX + entitytype + - * revstarttime + entity + OTHER_INFO_COLUMN + name. - */ - private static byte[] createOtherInfoKey(String entity, String entitytype, - byte[] revStartTime, String name) throws IOException { - return KeyBuilder.newInstance().add(ENTITY_ENTRY_PREFIX).add(entitytype) - .add(revStartTime).add(entity).add(OTHER_INFO_COLUMN).add(name) - .getBytes(); - } - - /** - * Creates a string representation of the byte array from the given offset - * to the end of the array (for parsing other info keys). - */ - private static String parseRemainingKey(byte[] b, int offset) { - return new String(b, offset, b.length - offset); - } - - /** - * Creates a related entity key, serializing ENTITY_ENTRY_PREFIX + - * entitytype + revstarttime + entity + RELATED_COLUMN + relatedentitytype + - * relatedentity. - */ - private static byte[] createReleatedEntityKey(String entity, - String entitytype, byte[] revStartTime, String relatedEntity, - String relatedEntityType) throws IOException { - return KeyBuilder.newInstance().add(ENTITY_ENTRY_PREFIX).add(entitytype) - .add(revStartTime).add(entity).add(RELATED_COLUMN) - .add(relatedEntityType).add(relatedEntity).getBytes(); - } - - /** - * Parses the related entity from the given key at the given offset and - * adds it to the given entity. 
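For orientation, a recap (reconstructed from the key-construction helpers and their javadocs above, not part of the patch) of the leveldb key schema, with fields in the order KeyBuilder emits them:

```java
// start time : "k" | entityType | entityId                  -> revStartTime
// event      : "e" | entityType | revStartTime | entityId
//                  | "t" | revEventTimestamp | eventType    -> event info map
// primary    : "e" | entityType | revStartTime | entityId
//                  | "f" | name | value                     -> (empty)
// other info : "e" | entityType | revStartTime | entityId
//                  | "i" | name                             -> value
// related    : "e" | entityType | revStartTime | entityId
//                  | "r" | relatedType | relatedId          -> (empty)
// index      : "i" | filterName | filterValue | <any entity key above>
//                                                           -> same value
```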
- */ - private static void addRelatedEntity(ATSEntity atsEntity, byte[] key, - int offset) throws IOException { - KeyParser kp = new KeyParser(key, offset); - String type = kp.getNextString(); - String id = kp.getNextString(); - atsEntity.addRelatedEntity(type, id); - } - - /** - * Clears the cache to test reloading start times from leveldb (only for - * testing). - */ - @VisibleForTesting - void clearStartTimeCache() { - startTimeCache.clear(); - } -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/MemoryApplicationTimelineStore.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/MemoryApplicationTimelineStore.java deleted file mode 100644 index e3a35a0..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/MemoryApplicationTimelineStore.java +++ /dev/null @@ -1,306 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.yarn.server.applicationhistoryservice.apptimeline; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.EnumSet; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.PriorityQueue; -import java.util.Set; -import java.util.SortedSet; - -import org.apache.hadoop.classification.InterfaceAudience.Private; -import org.apache.hadoop.classification.InterfaceStability.Unstable; -import org.apache.hadoop.service.AbstractService; -import org.apache.hadoop.yarn.api.records.apptimeline.ATSEntities; -import org.apache.hadoop.yarn.api.records.apptimeline.ATSEntity; -import org.apache.hadoop.yarn.api.records.apptimeline.ATSEvent; -import org.apache.hadoop.yarn.api.records.apptimeline.ATSEvents; -import org.apache.hadoop.yarn.api.records.apptimeline.ATSEvents.ATSEventsOfOneEntity; -import org.apache.hadoop.yarn.api.records.apptimeline.ATSPutErrors; -import org.apache.hadoop.yarn.api.records.apptimeline.ATSPutErrors.ATSPutError; - -/** - * In-memory implementation of {@link ApplicationTimelineStore}. This - * implementation is for test purpose only. If users improperly instantiate it, - * they may encounter reading and writing history data in different memory - * store. 
- * - */ -@Private -@Unstable -public class MemoryApplicationTimelineStore - extends AbstractService implements ApplicationTimelineStore { - - private Map entities = - new HashMap(); - - public MemoryApplicationTimelineStore() { - super(MemoryApplicationTimelineStore.class.getName()); - } - - @Override - public ATSEntities getEntities(String entityType, Long limit, - Long windowStart, Long windowEnd, NameValuePair primaryFilter, - Collection secondaryFilters, EnumSet fields) { - if (limit == null) { - limit = DEFAULT_LIMIT; - } - if (windowStart == null) { - windowStart = Long.MIN_VALUE; - } - if (windowEnd == null) { - windowEnd = Long.MAX_VALUE; - } - if (fields == null) { - fields = EnumSet.allOf(Field.class); - } - List entitiesSelected = new ArrayList(); - for (ATSEntity entity : new PriorityQueue(entities.values())) { - if (entitiesSelected.size() >= limit) { - break; - } - if (!entity.getEntityType().equals(entityType)) { - continue; - } - if (entity.getStartTime() <= windowStart) { - continue; - } - if (entity.getStartTime() > windowEnd) { - continue; - } - if (primaryFilter != null && - !matchPrimaryFilter(entity.getPrimaryFilters(), primaryFilter)) { - continue; - } - if (secondaryFilters != null) { // OR logic - boolean flag = false; - for (NameValuePair secondaryFilter : secondaryFilters) { - if (secondaryFilter != null && - matchFilter(entity.getOtherInfo(), secondaryFilter)) { - flag = true; - break; - } - } - if (!flag) { - continue; - } - } - entitiesSelected.add(entity); - } - List entitiesToReturn = new ArrayList(); - for (ATSEntity entitySelected : entitiesSelected) { - entitiesToReturn.add(maskFields(entitySelected, fields)); - } - Collections.sort(entitiesToReturn); - ATSEntities entitiesWrapper = new ATSEntities(); - entitiesWrapper.setEntities(entitiesToReturn); - return entitiesWrapper; - } - - @Override - public ATSEntity getEntity(String entityId, String entityType, - EnumSet fieldsToRetrieve) { - if (fieldsToRetrieve == null) { - fieldsToRetrieve = EnumSet.allOf(Field.class); - } - ATSEntity entity = entities.get(new EntityIdentifier(entityId, entityType)); - if (entity == null) { - return null; - } else { - return maskFields(entity, fieldsToRetrieve); - } - } - - @Override - public ATSEvents getEntityTimelines(String entityType, - SortedSet entityIds, Long limit, Long windowStart, - Long windowEnd, - Set eventTypes) { - ATSEvents allEvents = new ATSEvents(); - if (entityIds == null) { - return allEvents; - } - if (limit == null) { - limit = DEFAULT_LIMIT; - } - if (windowStart == null) { - windowStart = Long.MIN_VALUE; - } - if (windowEnd == null) { - windowEnd = Long.MAX_VALUE; - } - for (String entityId : entityIds) { - EntityIdentifier entityID = new EntityIdentifier(entityId, entityType); - ATSEntity entity = entities.get(entityID); - if (entity == null) { - continue; - } - ATSEventsOfOneEntity events = new ATSEventsOfOneEntity(); - events.setEntityId(entityId); - events.setEntityType(entityType); - for (ATSEvent event : entity.getEvents()) { - if (events.getEvents().size() >= limit) { - break; - } - if (event.getTimestamp() <= windowStart) { - continue; - } - if (event.getTimestamp() > windowEnd) { - continue; - } - if (eventTypes != null && !eventTypes.contains(event.getEventType())) { - continue; - } - events.addEvent(event); - } - allEvents.addEvent(events); - } - return allEvents; - } - - @Override - public ATSPutErrors put(ATSEntities data) { - ATSPutErrors errors = new ATSPutErrors(); - for (ATSEntity entity : data.getEntities()) { - EntityIdentifier 
entityId = - new EntityIdentifier(entity.getEntityId(), entity.getEntityType()); - // store entity info in memory - ATSEntity existingEntity = entities.get(entityId); - if (existingEntity == null) { - existingEntity = new ATSEntity(); - existingEntity.setEntityId(entity.getEntityId()); - existingEntity.setEntityType(entity.getEntityType()); - existingEntity.setStartTime(entity.getStartTime()); - entities.put(entityId, existingEntity); - } - if (entity.getEvents() != null) { - if (existingEntity.getEvents() == null) { - existingEntity.setEvents(entity.getEvents()); - } else { - existingEntity.addEvents(entity.getEvents()); - } - Collections.sort(existingEntity.getEvents()); - } - // check startTime - if (existingEntity.getStartTime() == null) { - if (existingEntity.getEvents() == null - || existingEntity.getEvents().isEmpty()) { - ATSPutError error = new ATSPutError(); - error.setEntityId(entityId.getId()); - error.setEntityType(entityId.getType()); - error.setErrorCode(ATSPutError.NO_START_TIME); - errors.addError(error); - entities.remove(entityId); - continue; - } else { - existingEntity.setStartTime(entity.getEvents().get(0).getTimestamp()); - } - } - if (entity.getPrimaryFilters() != null) { - if (existingEntity.getPrimaryFilters() == null) { - existingEntity.setPrimaryFilters(entity.getPrimaryFilters()); - } else { - existingEntity.addPrimaryFilters(entity.getPrimaryFilters()); - } - } - if (entity.getOtherInfo() != null) { - if (existingEntity.getOtherInfo() == null) { - existingEntity.setOtherInfo(entity.getOtherInfo()); - } else { - existingEntity.addOtherInfo(entity.getOtherInfo()); - } - } - // relate it to other entities - if (entity.getRelatedEntities() == null) { - continue; - } - for (Map.Entry> partRelatedEntities : entity - .getRelatedEntities().entrySet()) { - if (partRelatedEntities == null) { - continue; - } - for (String idStr : partRelatedEntities.getValue()) { - EntityIdentifier relatedEntityId = - new EntityIdentifier(idStr, partRelatedEntities.getKey()); - ATSEntity relatedEntity = entities.get(relatedEntityId); - if (relatedEntity != null) { - relatedEntity.addRelatedEntity( - existingEntity.getEntityType(), existingEntity.getEntityId()); - } else { - relatedEntity = new ATSEntity(); - relatedEntity.setEntityId(relatedEntityId.getId()); - relatedEntity.setEntityType(relatedEntityId.getType()); - relatedEntity.setStartTime(existingEntity.getStartTime()); - relatedEntity.addRelatedEntity(existingEntity.getEntityType(), - existingEntity.getEntityId()); - entities.put(relatedEntityId, relatedEntity); - } - } - } - } - return errors; - } - - private static ATSEntity maskFields( - ATSEntity entity, EnumSet fields) { - // Conceal the fields that are not going to be exposed - ATSEntity entityToReturn = new ATSEntity(); - entityToReturn.setEntityId(entity.getEntityId()); - entityToReturn.setEntityType(entity.getEntityType()); - entityToReturn.setStartTime(entity.getStartTime()); - entityToReturn.setEvents(fields.contains(Field.EVENTS) ? - entity.getEvents() : fields.contains(Field.LAST_EVENT_ONLY) ? - Arrays.asList(entity.getEvents().get(0)) : null); - entityToReturn.setRelatedEntities(fields.contains(Field.RELATED_ENTITIES) ? - entity.getRelatedEntities() : null); - entityToReturn.setPrimaryFilters(fields.contains(Field.PRIMARY_FILTERS) ? - entity.getPrimaryFilters() : null); - entityToReturn.setOtherInfo(fields.contains(Field.OTHER_INFO) ? 
- entity.getOtherInfo() : null); - return entityToReturn; - } - - private static boolean matchFilter(Map tags, - NameValuePair filter) { - Object value = tags.get(filter.getName()); - if (value == null) { // doesn't have the filter - return false; - } else if (!value.equals(filter.getValue())) { // doesn't match the filter - return false; - } - return true; - } - - private static boolean matchPrimaryFilter(Map> tags, - NameValuePair filter) { - Set value = tags.get(filter.getName()); - if (value == null) { // doesn't have the filter - return false; - } else { - return value.contains(filter.getValue()); - } - } - -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/NameValuePair.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/NameValuePair.java deleted file mode 100644 index 66a21bb..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/NameValuePair.java +++ /dev/null @@ -1,59 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.yarn.server.applicationhistoryservice.apptimeline; - -import org.apache.hadoop.classification.InterfaceAudience; -import org.apache.hadoop.classification.InterfaceStability; - -/** - * A class holding a name and value pair, used for specifying filters in - * {@link ApplicationTimelineReader}. - */ -@InterfaceAudience.Private -@InterfaceStability.Unstable -public class NameValuePair { - String name; - Object value; - - public NameValuePair(String name, Object value) { - this.name = name; - this.value = value; - } - - /** - * Get the name. - * @return The name. - */ - public String getName() { - - return name; - } - - /** - * Get the value. - * @return The value. 
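Illustrative sketch (not part of the patch): in the in-memory store's getEntities above, the primary filter must match, while secondary filters are OR-combined against otherInfo (the flag loop keeps an entity if any one pair matches). A hypothetical call, with made-up filter names and values:

```java
import java.util.Arrays;
import java.util.Collection;

import org.apache.hadoop.conf.Configuration;

public class MemoryStoreFilterDemo {
  public static void main(String[] args) {
    MemoryApplicationTimelineStore store = new MemoryApplicationTimelineStore();
    store.init(new Configuration());
    store.start();
    // An entity passes if its otherInfo matches "user"="alice" OR "user"="bob".
    Collection<NameValuePair> secondary = Arrays.asList(
        new NameValuePair("user", "alice"),
        new NameValuePair("user", "bob"));
    store.getEntities("YARN_APPLICATION", null, null, null,
        new NameValuePair("appState", "RUNNING"), secondary, null);
    store.stop();
  }
}
```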
- */ - public Object getValue() { - return value; - } - - @Override - public String toString() { - return "{ name: " + name + ", value: " + value + " }"; - } -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/package-info.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/package-info.java deleted file mode 100644 index c3aaafe..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/package-info.java +++ /dev/null @@ -1,20 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -@InterfaceAudience.Private -package org.apache.hadoop.yarn.server.applicationhistoryservice.apptimeline; -import org.apache.hadoop.classification.InterfaceAudience; diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/EntityIdentifier.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/EntityIdentifier.java new file mode 100644 index 0000000..4b202d8 --- /dev/null +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/EntityIdentifier.java @@ -0,0 +1,100 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline; + +import org.apache.hadoop.classification.InterfaceAudience.Private; +import org.apache.hadoop.classification.InterfaceStability.Unstable; + +/** + * The unique identifier for an entity + */ +@Private +@Unstable +public class EntityIdentifier implements Comparable { + + private String id; + private String type; + + public EntityIdentifier(String id, String type) { + this.id = id; + this.type = type; + } + + /** + * Get the entity Id. + * @return The entity Id. + */ + public String getId() { + return id; + } + + /** + * Get the entity type. + * @return The entity type. + */ + public String getType() { + return type; + } + + @Override + public int compareTo(EntityIdentifier other) { + int c = type.compareTo(other.type); + if (c != 0) return c; + return id.compareTo(other.id); + } + + @Override + public int hashCode() { + // generated by eclipse + final int prime = 31; + int result = 1; + result = prime * result + ((id == null) ? 0 : id.hashCode()); + result = prime * result + ((type == null) ? 0 : type.hashCode()); + return result; + } + + @Override + public boolean equals(Object obj) { + // generated by eclipse + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + EntityIdentifier other = (EntityIdentifier) obj; + if (id == null) { + if (other.id != null) + return false; + } else if (!id.equals(other.id)) + return false; + if (type == null) { + if (other.type != null) + return false; + } else if (!type.equals(other.type)) + return false; + return true; + } + + @Override + public String toString() { + return "{ id: " + id + ", type: "+ type + " }"; + } + +} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/GenericObjectMapper.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/GenericObjectMapper.java new file mode 100644 index 0000000..7d1c54b --- /dev/null +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/GenericObjectMapper.java @@ -0,0 +1,222 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
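Illustrative sketch (not part of the patch): the renamed EntityIdentifier above implements equals/hashCode/compareTo, which is what lets the stores use it as a map key (the entities map, the start-time LRU cache) and sort (type, id) pairs:

```java
import java.util.TreeSet;

public class EntityIdentifierDemo {
  public static void main(String[] args) {
    TreeSet<EntityIdentifier> set = new TreeSet<EntityIdentifier>();
    set.add(new EntityIdentifier("app_2", "YARN_APPLICATION"));
    set.add(new EntityIdentifier("app_1", "YARN_APPLICATION"));
    // compareTo orders by type first, then id.
    System.out.println(set.first()); // { id: app_1, type: YARN_APPLICATION }
  }
}
```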
+ */ +package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline; + +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.DataInputStream; +import java.io.DataOutputStream; +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.classification.InterfaceStability; +import org.apache.hadoop.io.WritableUtils; +import org.codehaus.jackson.map.ObjectMapper; + +/** + * A utility class providing methods for serializing and deserializing + * objects. The {@link #write(Object)}, {@link #read(byte[])} and {@link + * #write(java.io.DataOutputStream, Object)}, {@link + * #read(java.io.DataInputStream)} methods are used by the + * {@link LeveldbTimelineStore} to store and retrieve arbitrary + * JSON, while the {@link #writeReverseOrderedLong} and {@link + * #readReverseOrderedLong} methods are used to sort entities in descending + * start time order. + */ +@InterfaceAudience.Private +@InterfaceStability.Unstable +public class GenericObjectMapper { + private static final byte[] EMPTY_BYTES = new byte[0]; + + private static final byte LONG = 0x1; + private static final byte INTEGER = 0x2; + private static final byte DOUBLE = 0x3; + private static final byte STRING = 0x4; + private static final byte BOOLEAN = 0x5; + private static final byte LIST = 0x6; + private static final byte MAP = 0x7; + + /** + * Serializes an Object into a byte array. Along with {@link #read(byte[]) }, + * can be used to serialize an Object and deserialize it into an Object of + * the same type without needing to specify the Object's type, + * as long as it is one of the JSON-compatible objects Long, Integer, + * Double, String, Boolean, List, or Map. The current implementation uses + * ObjectMapper to serialize complex objects (List and Map) while using + * Writable to serialize simpler objects, to produce fewer bytes. + * + * @param o An Object + * @return A byte array representation of the Object + * @throws IOException + */ + public static byte[] write(Object o) throws IOException { + if (o == null) + return EMPTY_BYTES; + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + write(new DataOutputStream(baos), o); + return baos.toByteArray(); + } + + /** + * Serializes an Object and writes it to a DataOutputStream. Along with + * {@link #read(java.io.DataInputStream)}, can be used to serialize an Object + * and deserialize it into an Object of the same type without needing to + * specify the Object's type, as long as it is one of the JSON-compatible + * objects Long, Integer, Double, String, Boolean, List, or Map. The current + * implementation uses ObjectMapper to serialize complex objects (List and + * Map) while using Writable to serialize simpler objects, to produce fewer + * bytes. 
+ * + * @param dos A DataOutputStream + * @param o An Object + * @throws IOException + */ + public static void write(DataOutputStream dos, Object o) + throws IOException { + if (o == null) + return; + if (o instanceof Long) { + dos.write(LONG); + WritableUtils.writeVLong(dos, (Long) o); + } else if(o instanceof Integer) { + dos.write(INTEGER); + WritableUtils.writeVInt(dos, (Integer) o); + } else if(o instanceof Double) { + dos.write(DOUBLE); + dos.writeDouble((Double) o); + } else if (o instanceof String) { + dos.write(STRING); + WritableUtils.writeString(dos, (String) o); + } else if (o instanceof Boolean) { + dos.write(BOOLEAN); + dos.writeBoolean((Boolean) o); + } else if (o instanceof List) { + dos.write(LIST); + ObjectMapper mapper = new ObjectMapper(); + mapper.writeValue(dos, o); + } else if (o instanceof Map) { + dos.write(MAP); + ObjectMapper mapper = new ObjectMapper(); + mapper.writeValue(dos, o); + } else { + throw new IOException("Couldn't serialize object"); + } + } + + /** + * Deserializes an Object from a byte array created with + * {@link #write(Object)}. + * + * @param b A byte array + * @return An Object + * @throws IOException + */ + public static Object read(byte[] b) throws IOException { + return read(b, 0); + } + + /** + * Deserializes an Object from a byte array at a specified offset, assuming + * the bytes were created with {@link #write(Object)}. + * + * @param b A byte array + * @param offset Offset into the array + * @return An Object + * @throws IOException + */ + public static Object read(byte[] b, int offset) throws IOException { + if (b == null || b.length == 0) { + return null; + } + ByteArrayInputStream bais = new ByteArrayInputStream(b, offset, + b.length - offset); + return read(new DataInputStream(bais)); + } + + /** + * Reads an Object from a DataInputStream whose data has been written with + * {@link #write(java.io.DataOutputStream, Object)}. + * + * @param dis A DataInputStream + * @return An Object, null if an unrecognized type + * @throws IOException + */ + public static Object read(DataInputStream dis) throws IOException { + byte code = (byte)dis.read(); + ObjectMapper mapper; + switch (code) { + case LONG: + return WritableUtils.readVLong(dis); + case INTEGER: + return WritableUtils.readVInt(dis); + case DOUBLE: + return dis.readDouble(); + case STRING: + return WritableUtils.readString(dis); + case BOOLEAN: + return dis.readBoolean(); + case LIST: + mapper = new ObjectMapper(); + return mapper.readValue(dis, ArrayList.class); + case MAP: + mapper = new ObjectMapper(); + return mapper.readValue(dis, HashMap.class); + default: + return null; + } + } + + /** + * Converts a long to a 8-byte array so that lexicographic ordering of the + * produced byte arrays sort the longs in descending order. + * + * @param l A long + * @return A byte array + */ + public static byte[] writeReverseOrderedLong(long l) { + byte[] b = new byte[8]; + b[0] = (byte)(0x7f ^ ((l >> 56) & 0xff)); + for (int i = 1; i < 7; i++) + b[i] = (byte)(0xff ^ ((l >> 8*(7-i)) & 0xff)); + b[7] = (byte)(0xff ^ (l & 0xff)); + return b; + } + + /** + * Reads 8 bytes from an array starting at the specified offset and + * converts them to a long. The bytes are assumed to have been created + * with {@link #writeReverseOrderedLong}. 
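Illustrative sketch (not part of the patch): the write() dispatch above tags each value with a one-byte type code, using Writable encodings for scalars and Jackson for List and Map. A check of the Map path, assuming GenericObjectMapper is importable:

```java
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

public class TaggedEncodingDemo {
  public static void main(String[] args) throws IOException {
    Map<String, Object> info = new HashMap<String, Object>();
    info.put("exitStatus", 0);
    byte[] b = GenericObjectMapper.write(info);
    System.out.println(b[0]);                    // 7, the MAP type tag
    Map<?, ?> back = (Map<?, ?>) GenericObjectMapper.read(b);
    System.out.println(back.get("exitStatus"));  // 0
  }
}
```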
+ *
+ * @param b A byte array
+ * @param offset An offset into the byte array
+ * @return A long
+ */
+ public static long readReverseOrderedLong(byte[] b, int offset) {
+   long l = b[offset] & 0xff;
+   for (int i = 1; i < 8; i++) {
+     l = l << 8;
+     l = l | (b[offset + i] & 0xff);
+   }
+   return l ^ 0x7fffffffffffffffL;
+ }
+
+}
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/LeveldbTimelineStore.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/LeveldbTimelineStore.java
new file mode 100644
index 0000000..cc3e015
--- /dev/null
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/LeveldbTimelineStore.java
@@ -0,0 +1,873 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline; + +import java.io.ByteArrayOutputStream; +import java.io.File; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.Comparator; +import java.util.EnumSet; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; +import java.util.SortedSet; +import java.util.TreeMap; + +import com.google.common.annotations.VisibleForTesting; +import org.apache.commons.collections.map.LRUMap; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.classification.InterfaceStability; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.io.IOUtils; +import org.apache.hadoop.io.WritableComparator; +import org.apache.hadoop.service.AbstractService; +import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities; +import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity; +import org.apache.hadoop.yarn.api.records.timeline.TimelineEvent; +import org.apache.hadoop.yarn.api.records.timeline.TimelineEvents; +import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse; +import org.apache.hadoop.yarn.api.records.timeline.TimelineEvents.EventsOfOneEntity; +import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse.TimelinePutError; +import org.apache.hadoop.yarn.conf.YarnConfiguration; +import org.fusesource.leveldbjni.JniDBFactory; +import org.iq80.leveldb.DB; +import org.iq80.leveldb.DBIterator; +import org.iq80.leveldb.Options; +import org.iq80.leveldb.WriteBatch; + +import static org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.GenericObjectMapper.readReverseOrderedLong; +import static org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.GenericObjectMapper.writeReverseOrderedLong; + +/** + * An implementation of a timeline store backed by leveldb. 
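+ * <p>
+ * A minimal startup sketch (illustrative; assumes a writable local
+ * directory for the store):
+ * <pre>
+ * Configuration conf = new YarnConfiguration();
+ * conf.set(YarnConfiguration.TIMELINE_SERVICE_LEVELDB_PATH, "/tmp/timeline");
+ * TimelineStore store = new LeveldbTimelineStore();
+ * store.init(conf);
+ * store.start();
+ * </pre>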
+ */ +@InterfaceAudience.Private +@InterfaceStability.Unstable +public class LeveldbTimelineStore extends AbstractService + implements TimelineStore { + private static final Log LOG = LogFactory + .getLog(LeveldbTimelineStore.class); + + private static final String FILENAME = "leveldb-timeline-store.ldb"; + + private static final byte[] START_TIME_LOOKUP_PREFIX = "k".getBytes(); + private static final byte[] ENTITY_ENTRY_PREFIX = "e".getBytes(); + private static final byte[] INDEXED_ENTRY_PREFIX = "i".getBytes(); + + private static final byte[] PRIMARY_FILTER_COLUMN = "f".getBytes(); + private static final byte[] OTHER_INFO_COLUMN = "i".getBytes(); + private static final byte[] RELATED_COLUMN = "r".getBytes(); + private static final byte[] TIME_COLUMN = "t".getBytes(); + + private static final byte[] EMPTY_BYTES = new byte[0]; + + private static final int START_TIME_CACHE_SIZE = 10000; + + @SuppressWarnings("unchecked") + private final Map startTimeCache = + Collections.synchronizedMap(new LRUMap(START_TIME_CACHE_SIZE)); + + private DB db; + + public LeveldbTimelineStore() { + super(LeveldbTimelineStore.class.getName()); + } + + @Override + protected void serviceInit(Configuration conf) throws Exception { + Options options = new Options(); + options.createIfMissing(true); + JniDBFactory factory = new JniDBFactory(); + String path = conf.get(YarnConfiguration.TIMELINE_SERVICE_LEVELDB_PATH); + File p = new File(path); + if (!p.exists()) + if (!p.mkdirs()) + throw new IOException("Couldn't create directory for leveldb " + + "timeline store " + path); + LOG.info("Using leveldb path " + path); + db = factory.open(new File(path, FILENAME), options); + super.serviceInit(conf); + } + + @Override + protected void serviceStop() throws Exception { + IOUtils.cleanup(LOG, db); + super.serviceStop(); + } + + private static class KeyBuilder { + private static final int MAX_NUMBER_OF_KEY_ELEMENTS = 10; + private byte[][] b; + private boolean[] useSeparator; + private int index; + private int length; + + public KeyBuilder(int size) { + b = new byte[size][]; + useSeparator = new boolean[size]; + index = 0; + length = 0; + } + + public static KeyBuilder newInstance() { + return new KeyBuilder(MAX_NUMBER_OF_KEY_ELEMENTS); + } + + public KeyBuilder add(String s) { + return add(s.getBytes(), true); + } + + public KeyBuilder add(byte[] t) { + return add(t, false); + } + + public KeyBuilder add(byte[] t, boolean sep) { + b[index] = t; + useSeparator[index] = sep; + length += t.length; + if (sep) + length++; + index++; + return this; + } + + public byte[] getBytes() throws IOException { + ByteArrayOutputStream baos = new ByteArrayOutputStream(length); + for (int i = 0; i < index; i++) { + baos.write(b[i]); + if (i < index-1 && useSeparator[i]) + baos.write(0x0); + } + return baos.toByteArray(); + } + + public byte[] getBytesForLookup() throws IOException { + ByteArrayOutputStream baos = new ByteArrayOutputStream(length); + for (int i = 0; i < index; i++) { + baos.write(b[i]); + if (useSeparator[i]) + baos.write(0x0); + } + return baos.toByteArray(); + } + } + + private static class KeyParser { + private final byte[] b; + private int offset; + + public KeyParser(byte[] b, int offset) { + this.b = b; + this.offset = offset; + } + + public String getNextString() throws IOException { + if (offset >= b.length) + throw new IOException( + "tried to read nonexistent string from byte array"); + int i = 0; + while (offset+i < b.length && b[offset+i] != 0x0) + i++; + String s = new String(b, offset, i); + offset = offset + i 
+ 1; + return s; + } + + public long getNextLong() throws IOException { + if (offset+8 >= b.length) + throw new IOException("byte array ran out when trying to read long"); + long l = readReverseOrderedLong(b, offset); + offset += 8; + return l; + } + + public int getOffset() { + return offset; + } + } + + @Override + public TimelineEntity getEntity(String entityId, String entityType, + EnumSet fields) throws IOException { + DBIterator iterator = null; + try { + byte[] revStartTime = getStartTime(entityId, entityType, null, null, null); + if (revStartTime == null) + return null; + byte[] prefix = KeyBuilder.newInstance().add(ENTITY_ENTRY_PREFIX) + .add(entityType).add(revStartTime).add(entityId).getBytesForLookup(); + + iterator = db.iterator(); + iterator.seek(prefix); + + return getEntity(entityId, entityType, + readReverseOrderedLong(revStartTime, 0), fields, iterator, prefix, + prefix.length); + } finally { + IOUtils.cleanup(LOG, iterator); + } + } + + /** + * Read entity from a db iterator. If no information is found in the + * specified fields for this entity, return null. + */ + private static TimelineEntity getEntity(String entityId, String entityType, + Long startTime, EnumSet fields, DBIterator iterator, + byte[] prefix, int prefixlen) throws IOException { + if (fields == null) + fields = EnumSet.allOf(Field.class); + + TimelineEntity entity = new TimelineEntity(); + boolean events = false; + boolean lastEvent = false; + if (fields.contains(Field.EVENTS)) { + events = true; + entity.setEvents(new ArrayList()); + } else if (fields.contains(Field.LAST_EVENT_ONLY)) { + lastEvent = true; + entity.setEvents(new ArrayList()); + } + else { + entity.setEvents(null); + } + boolean relatedEntities = false; + if (fields.contains(Field.RELATED_ENTITIES)) { + relatedEntities = true; + } else { + entity.setRelatedEntities(null); + } + boolean primaryFilters = false; + if (fields.contains(Field.PRIMARY_FILTERS)) { + primaryFilters = true; + } else { + entity.setPrimaryFilters(null); + } + boolean otherInfo = false; + if (fields.contains(Field.OTHER_INFO)) { + otherInfo = true; + entity.setOtherInfo(new HashMap()); + } else { + entity.setOtherInfo(null); + } + + // iterate through the entity's entry, parsing information if it is part + // of a requested field + for (; iterator.hasNext(); iterator.next()) { + byte[] key = iterator.peekNext().getKey(); + if (!prefixMatches(prefix, prefixlen, key)) + break; + if (key[prefixlen] == PRIMARY_FILTER_COLUMN[0]) { + if (primaryFilters) { + addPrimaryFilter(entity, key, + prefixlen + PRIMARY_FILTER_COLUMN.length); + } + } else if (key[prefixlen] == OTHER_INFO_COLUMN[0]) { + if (otherInfo) { + entity.addOtherInfo(parseRemainingKey(key, + prefixlen + OTHER_INFO_COLUMN.length), + GenericObjectMapper.read(iterator.peekNext().getValue())); + } + } else if (key[prefixlen] == RELATED_COLUMN[0]) { + if (relatedEntities) { + addRelatedEntity(entity, key, + prefixlen + RELATED_COLUMN.length); + } + } else if (key[prefixlen] == TIME_COLUMN[0]) { + if (events || (lastEvent && entity.getEvents().size() == 0)) { + TimelineEvent event = getEntityEvent(null, key, prefixlen + + TIME_COLUMN.length, iterator.peekNext().getValue()); + if (event != null) { + entity.addEvent(event); + } + } + } else { + LOG.warn(String.format("Found unexpected column for entity %s of " + + "type %s (0x%02x)", entityId, entityType, key[prefixlen])); + } + } + + entity.setEntityId(entityId); + entity.setEntityType(entityType); + entity.setStartTime(startTime); + + return entity; + } + + @Override 
+ public TimelineEvents getEntityTimelines(String entityType,
+     SortedSet<String> entityIds, Long limit, Long windowStart,
+     Long windowEnd, Set<String> eventType) throws IOException {
+   TimelineEvents events = new TimelineEvents();
+   if (entityIds == null || entityIds.isEmpty())
+     return events;
+   // create a lexicographically-ordered map from start time to entities
+   Map<byte[], List<EntityIdentifier>> startTimeMap =
+       new TreeMap<byte[], List<EntityIdentifier>>(new Comparator<byte[]>() {
+         @Override
+         public int compare(byte[] o1, byte[] o2) {
+           return WritableComparator.compareBytes(o1, 0, o1.length, o2, 0,
+               o2.length);
+         }
+       });
+   DBIterator iterator = null;
+   try {
+     // look up start times for the specified entities
+     // skip entities with no start time
+     for (String entity : entityIds) {
+       byte[] startTime = getStartTime(entity, entityType, null, null, null);
+       if (startTime != null) {
+         List<EntityIdentifier> entities = startTimeMap.get(startTime);
+         if (entities == null) {
+           entities = new ArrayList<EntityIdentifier>();
+           startTimeMap.put(startTime, entities);
+         }
+         entities.add(new EntityIdentifier(entity, entityType));
+       }
+     }
+     for (Entry<byte[], List<EntityIdentifier>> entry :
+         startTimeMap.entrySet()) {
+       // look up the events matching the given parameters (limit,
+       // start time, end time, event types) for entities whose start times
+       // were found and add the entities to the return list
+       byte[] revStartTime = entry.getKey();
+       for (EntityIdentifier entityID : entry.getValue()) {
+         EventsOfOneEntity entity = new EventsOfOneEntity();
+         entity.setEntityId(entityID.getId());
+         entity.setEntityType(entityType);
+         events.addEvent(entity);
+         KeyBuilder kb = KeyBuilder.newInstance().add(ENTITY_ENTRY_PREFIX)
+             .add(entityType).add(revStartTime).add(entityID.getId())
+             .add(TIME_COLUMN);
+         byte[] prefix = kb.getBytesForLookup();
+         if (windowEnd == null) {
+           windowEnd = Long.MAX_VALUE;
+         }
+         byte[] revts = writeReverseOrderedLong(windowEnd);
+         kb.add(revts);
+         byte[] first = kb.getBytesForLookup();
+         byte[] last = null;
+         if (windowStart != null) {
+           last = KeyBuilder.newInstance().add(prefix)
+               .add(writeReverseOrderedLong(windowStart)).getBytesForLookup();
+         }
+         if (limit == null) {
+           limit = DEFAULT_LIMIT;
+         }
+         iterator = db.iterator();
+         for (iterator.seek(first); entity.getEvents().size() < limit &&
+             iterator.hasNext(); iterator.next()) {
+           byte[] key = iterator.peekNext().getKey();
+           if (!prefixMatches(prefix, prefix.length, key) || (last != null &&
+               WritableComparator.compareBytes(key, 0, key.length, last, 0,
+                   last.length) > 0))
+             break;
+           TimelineEvent event = getEntityEvent(eventType, key, prefix.length,
+               iterator.peekNext().getValue());
+           if (event != null)
+             entity.addEvent(event);
+         }
+       }
+     }
+   } finally {
+     IOUtils.cleanup(LOG, iterator);
+   }
+   return events;
+ }
+
+ /**
+  * Returns true if the byte array begins with the specified prefix.
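+  * For example (illustrative), {@code prefixMatches(new byte[]{0x1, 0x2}, 2,
+  * new byte[]{0x1, 0x2, 0x3})} returns true.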
+ */ + private static boolean prefixMatches(byte[] prefix, int prefixlen, + byte[] b) { + if (b.length < prefixlen) + return false; + return WritableComparator.compareBytes(prefix, 0, prefixlen, b, 0, + prefixlen) == 0; + } + + @Override + public TimelineEntities getEntities(String entityType, + Long limit, Long windowStart, Long windowEnd, + NameValuePair primaryFilter, Collection secondaryFilters, + EnumSet fields) throws IOException { + if (primaryFilter == null) { + // if no primary filter is specified, prefix the lookup with + // ENTITY_ENTRY_PREFIX + return getEntityByTime(ENTITY_ENTRY_PREFIX, entityType, limit, + windowStart, windowEnd, secondaryFilters, fields); + } else { + // if a primary filter is specified, prefix the lookup with + // INDEXED_ENTRY_PREFIX + primaryFilterName + primaryFilterValue + + // ENTITY_ENTRY_PREFIX + byte[] base = KeyBuilder.newInstance().add(INDEXED_ENTRY_PREFIX) + .add(primaryFilter.getName()) + .add(GenericObjectMapper.write(primaryFilter.getValue()), true) + .add(ENTITY_ENTRY_PREFIX).getBytesForLookup(); + return getEntityByTime(base, entityType, limit, windowStart, windowEnd, + secondaryFilters, fields); + } + } + + /** + * Retrieves a list of entities satisfying given parameters. + * + * @param base A byte array prefix for the lookup + * @param entityType The type of the entity + * @param limit A limit on the number of entities to return + * @param starttime The earliest entity start time to retrieve (exclusive) + * @param endtime The latest entity start time to retrieve (inclusive) + * @param secondaryFilters Filter pairs that the entities should match + * @param fields The set of fields to retrieve + * @return A list of entities + * @throws IOException + */ + private TimelineEntities getEntityByTime(byte[] base, + String entityType, Long limit, Long starttime, Long endtime, + Collection secondaryFilters, EnumSet fields) + throws IOException { + DBIterator iterator = null; + try { + KeyBuilder kb = KeyBuilder.newInstance().add(base).add(entityType); + // only db keys matching the prefix (base + entity type) will be parsed + byte[] prefix = kb.getBytesForLookup(); + if (endtime == null) { + // if end time is null, place no restriction on end time + endtime = Long.MAX_VALUE; + } + // using end time, construct a first key that will be seeked to + byte[] revts = writeReverseOrderedLong(endtime); + kb.add(revts); + byte[] first = kb.getBytesForLookup(); + byte[] last = null; + if (starttime != null) { + // if start time is not null, set a last key that will not be + // iterated past + last = KeyBuilder.newInstance().add(base).add(entityType) + .add(writeReverseOrderedLong(starttime)).getBytesForLookup(); + } + if (limit == null) { + // if limit is not specified, use the default + limit = DEFAULT_LIMIT; + } + + TimelineEntities entities = new TimelineEntities(); + iterator = db.iterator(); + iterator.seek(first); + // iterate until one of the following conditions is met: limit is + // reached, there are no more keys, the key prefix no longer matches, + // or a start time has been specified and reached/exceeded + while (entities.getEntities().size() < limit && iterator.hasNext()) { + byte[] key = iterator.peekNext().getKey(); + if (!prefixMatches(prefix, prefix.length, key) || (last != null && + WritableComparator.compareBytes(key, 0, key.length, last, 0, + last.length) > 0)) + break; + // read the start time and entityId from the current key + KeyParser kp = new KeyParser(key, prefix.length); + Long startTime = kp.getNextLong(); + String entityId = 
kp.getNextString(); + // parse the entity that owns this key, iterating over all keys for + // the entity + TimelineEntity entity = getEntity(entityId, entityType, startTime, + fields, iterator, key, kp.getOffset()); + if (entity == null) + continue; + // determine if the retrieved entity matches the provided secondary + // filters, and if so add it to the list of entities to return + boolean filterPassed = true; + if (secondaryFilters != null) { + for (NameValuePair filter : secondaryFilters) { + Object v = entity.getOtherInfo().get(filter.getName()); + if (v == null) { + Set vs = entity.getPrimaryFilters() + .get(filter.getName()); + if (vs != null && !vs.contains(filter.getValue())) { + filterPassed = false; + break; + } + } else if (!v.equals(filter.getValue())) { + filterPassed = false; + break; + } + } + } + if (filterPassed) + entities.addEntity(entity); + } + return entities; + } finally { + IOUtils.cleanup(LOG, iterator); + } + } + + /** + * Put a single entity. If there is an error, add a TimelinePutError to the given + * response. + */ + private void put(TimelineEntity entity, TimelinePutResponse response) { + WriteBatch writeBatch = null; + try { + writeBatch = db.createWriteBatch(); + List events = entity.getEvents(); + // look up the start time for the entity + byte[] revStartTime = getStartTime(entity.getEntityId(), + entity.getEntityType(), entity.getStartTime(), events, + writeBatch); + if (revStartTime == null) { + // if no start time is found, add an error and return + TimelinePutError error = new TimelinePutError(); + error.setEntityId(entity.getEntityId()); + error.setEntityType(entity.getEntityType()); + error.setErrorCode(TimelinePutError.NO_START_TIME); + response.addError(error); + return; + } + Long revStartTimeLong = readReverseOrderedLong(revStartTime, 0); + Map> primaryFilters = entity.getPrimaryFilters(); + + // write event entries + if (events != null && !events.isEmpty()) { + for (TimelineEvent event : events) { + byte[] revts = writeReverseOrderedLong(event.getTimestamp()); + byte[] key = createEntityEventKey(entity.getEntityId(), + entity.getEntityType(), revStartTime, revts, + event.getEventType()); + byte[] value = GenericObjectMapper.write(event.getEventInfo()); + writeBatch.put(key, value); + writePrimaryFilterEntries(writeBatch, primaryFilters, key, value); + } + } + + // write related entity entries + Map> relatedEntities = + entity.getRelatedEntities(); + if (relatedEntities != null && !relatedEntities.isEmpty()) { + for (Entry> relatedEntityList : + relatedEntities.entrySet()) { + String relatedEntityType = relatedEntityList.getKey(); + for (String relatedEntityId : relatedEntityList.getValue()) { + // look up start time of related entity + byte[] relatedEntityStartTime = getStartTime(relatedEntityId, + relatedEntityType, null, null, writeBatch); + if (relatedEntityStartTime == null) { + // if start time is not found, set start time of the related + // entity to the start time of this entity, and write it to the + // db and the cache + relatedEntityStartTime = revStartTime; + writeBatch.put(createStartTimeLookupKey(relatedEntityId, + relatedEntityType), relatedEntityStartTime); + startTimeCache.put(new EntityIdentifier(relatedEntityId, + relatedEntityType), revStartTimeLong); + } + // write reverse entry (related entity -> entity) + byte[] key = createReleatedEntityKey(relatedEntityId, + relatedEntityType, relatedEntityStartTime, + entity.getEntityId(), entity.getEntityType()); + writeBatch.put(key, EMPTY_BYTES); + // TODO: write forward entry 
(entity -> related entity)? + } + } + } + + // write primary filter entries + if (primaryFilters != null && !primaryFilters.isEmpty()) { + for (Entry> primaryFilter : + primaryFilters.entrySet()) { + for (Object primaryFilterValue : primaryFilter.getValue()) { + byte[] key = createPrimaryFilterKey(entity.getEntityId(), + entity.getEntityType(), revStartTime, + primaryFilter.getKey(), primaryFilterValue); + writeBatch.put(key, EMPTY_BYTES); + writePrimaryFilterEntries(writeBatch, primaryFilters, key, + EMPTY_BYTES); + } + } + } + + // write other info entries + Map otherInfo = entity.getOtherInfo(); + if (otherInfo != null && !otherInfo.isEmpty()) { + for (Entry i : otherInfo.entrySet()) { + byte[] key = createOtherInfoKey(entity.getEntityId(), + entity.getEntityType(), revStartTime, i.getKey()); + byte[] value = GenericObjectMapper.write(i.getValue()); + writeBatch.put(key, value); + writePrimaryFilterEntries(writeBatch, primaryFilters, key, value); + } + } + db.write(writeBatch); + } catch (IOException e) { + LOG.error("Error putting entity " + entity.getEntityId() + + " of type " + entity.getEntityType(), e); + TimelinePutError error = new TimelinePutError(); + error.setEntityId(entity.getEntityId()); + error.setEntityType(entity.getEntityType()); + error.setErrorCode(TimelinePutError.IO_EXCEPTION); + response.addError(error); + } finally { + IOUtils.cleanup(LOG, writeBatch); + } + } + + /** + * For a given key / value pair that has been written to the db, + * write additional entries to the db for each primary filter. + */ + private static void writePrimaryFilterEntries(WriteBatch writeBatch, + Map> primaryFilters, byte[] key, byte[] value) + throws IOException { + if (primaryFilters != null && !primaryFilters.isEmpty()) { + for (Entry> pf : primaryFilters.entrySet()) { + for (Object pfval : pf.getValue()) { + writeBatch.put(addPrimaryFilterToKey(pf.getKey(), pfval, + key), value); + } + } + } + } + + @Override + public TimelinePutResponse put(TimelineEntities entities) { + TimelinePutResponse response = new TimelinePutResponse(); + for (TimelineEntity entity : entities.getEntities()) { + put(entity, response); + } + return response; + } + + /** + * Get the unique start time for a given entity as a byte array that sorts + * the timestamps in reverse order (see {@link + * GenericObjectMapper#writeReverseOrderedLong(long)}). 
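+ * <p>
+ * If no start time is provided and none is found in the cache or the db,
+ * the minimum timestamp of the provided events is used when this call is
+ * part of a put; otherwise null is returned.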
+ * + * @param entityId The id of the entity + * @param entityType The type of the entity + * @param startTime The start time of the entity, or null + * @param events A list of events for the entity, or null + * @param writeBatch A leveldb write batch, if the method is called by a + * put as opposed to a get + * @return A byte array + * @throws IOException + */ + private byte[] getStartTime(String entityId, String entityType, + Long startTime, List events, WriteBatch writeBatch) + throws IOException { + EntityIdentifier entity = new EntityIdentifier(entityId, entityType); + if (startTime == null) { + // start time is not provided, so try to look it up + if (startTimeCache.containsKey(entity)) { + // found the start time in the cache + startTime = startTimeCache.get(entity); + } else { + // try to look up the start time in the db + byte[] b = createStartTimeLookupKey(entity.getId(), entity.getType()); + byte[] v = db.get(b); + if (v == null) { + // did not find the start time in the db + // if this is a put, try to set it from the provided events + if (events == null || writeBatch == null) { + // no events, or not a put, so return null + return null; + } + Long min = Long.MAX_VALUE; + for (TimelineEvent e : events) + if (min > e.getTimestamp()) + min = e.getTimestamp(); + startTime = min; + // selected start time as minimum timestamp of provided events + // write start time to db and cache + writeBatch.put(b, writeReverseOrderedLong(startTime)); + startTimeCache.put(entity, startTime); + } else { + // found the start time in the db + startTime = readReverseOrderedLong(v, 0); + if (writeBatch != null) { + // if this is a put, re-add the start time to the cache + startTimeCache.put(entity, startTime); + } + } + } + } else { + // start time is provided + // TODO: verify start time in db as well as cache? + if (startTimeCache.containsKey(entity)) { + // if the start time is already in the cache, + // and it is different from the provided start time, + // use the one from the cache + if (!startTime.equals(startTimeCache.get(entity))) + startTime = startTimeCache.get(entity); + } else if (writeBatch != null) { + // if this is a put, write the provided start time to the db and the + // cache + byte[] b = createStartTimeLookupKey(entity.getId(), entity.getType()); + writeBatch.put(b, writeReverseOrderedLong(startTime)); + startTimeCache.put(entity, startTime); + } + } + return writeReverseOrderedLong(startTime); + } + + /** + * Creates a key for looking up the start time of a given entity, + * of the form START_TIME_LOOKUP_PREFIX + entitytype + entity. + */ + private static byte[] createStartTimeLookupKey(String entity, + String entitytype) throws IOException { + return KeyBuilder.newInstance().add(START_TIME_LOOKUP_PREFIX) + .add(entitytype).add(entity).getBytes(); + } + + /** + * Creates an index entry for the given key of the form + * INDEXED_ENTRY_PREFIX + primaryfiltername + primaryfiltervalue + key. + */ + private static byte[] addPrimaryFilterToKey(String primaryFilterName, + Object primaryFilterValue, byte[] key) throws IOException { + return KeyBuilder.newInstance().add(INDEXED_ENTRY_PREFIX) + .add(primaryFilterName) + .add(GenericObjectMapper.write(primaryFilterValue), true).add(key) + .getBytes(); + } + + /** + * Creates an event key, serializing ENTITY_ENTRY_PREFIX + entitytype + + * revstarttime + entity + TIME_COLUMN + reveventtimestamp + eventtype. 
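+ * For example (illustrative), an event of type "E" at timestamp ts for
+ * entity "id" of type "T" is keyed as
+ * "e" + "T" + rev(starttime) + "id" + "t" + rev(ts) + "E",
+ * where rev denotes the reverse-ordered encoding of a long.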
+ */ + private static byte[] createEntityEventKey(String entity, String entitytype, + byte[] revStartTime, byte[] reveventtimestamp, String eventtype) + throws IOException { + return KeyBuilder.newInstance().add(ENTITY_ENTRY_PREFIX) + .add(entitytype).add(revStartTime).add(entity).add(TIME_COLUMN) + .add(reveventtimestamp).add(eventtype).getBytes(); + } + + /** + * Creates an event object from the given key, offset, and value. If the + * event type is not contained in the specified set of event types, + * returns null. + */ + private static TimelineEvent getEntityEvent(Set eventTypes, byte[] key, + int offset, byte[] value) throws IOException { + KeyParser kp = new KeyParser(key, offset); + long ts = kp.getNextLong(); + String tstype = kp.getNextString(); + if (eventTypes == null || eventTypes.contains(tstype)) { + TimelineEvent event = new TimelineEvent(); + event.setTimestamp(ts); + event.setEventType(tstype); + Object o = GenericObjectMapper.read(value); + if (o == null) { + event.setEventInfo(null); + } else if (o instanceof Map) { + @SuppressWarnings("unchecked") + Map m = (Map) o; + event.setEventInfo(m); + } else { + throw new IOException("Couldn't deserialize event info map"); + } + return event; + } + return null; + } + + /** + * Creates a primary filter key, serializing ENTITY_ENTRY_PREFIX + + * entitytype + revstarttime + entity + PRIMARY_FILTER_COLUMN + name + value. + */ + private static byte[] createPrimaryFilterKey(String entity, + String entitytype, byte[] revStartTime, String name, Object value) + throws IOException { + return KeyBuilder.newInstance().add(ENTITY_ENTRY_PREFIX).add(entitytype) + .add(revStartTime).add(entity).add(PRIMARY_FILTER_COLUMN).add(name) + .add(GenericObjectMapper.write(value)).getBytes(); + } + + /** + * Parses the primary filter from the given key at the given offset and + * adds it to the given entity. + */ + private static void addPrimaryFilter(TimelineEntity entity, byte[] key, + int offset) throws IOException { + KeyParser kp = new KeyParser(key, offset); + String name = kp.getNextString(); + Object value = GenericObjectMapper.read(key, kp.getOffset()); + entity.addPrimaryFilter(name, value); + } + + /** + * Creates an other info key, serializing ENTITY_ENTRY_PREFIX + entitytype + + * revstarttime + entity + OTHER_INFO_COLUMN + name. + */ + private static byte[] createOtherInfoKey(String entity, String entitytype, + byte[] revStartTime, String name) throws IOException { + return KeyBuilder.newInstance().add(ENTITY_ENTRY_PREFIX).add(entitytype) + .add(revStartTime).add(entity).add(OTHER_INFO_COLUMN).add(name) + .getBytes(); + } + + /** + * Creates a string representation of the byte array from the given offset + * to the end of the array (for parsing other info keys). + */ + private static String parseRemainingKey(byte[] b, int offset) { + return new String(b, offset, b.length - offset); + } + + /** + * Creates a related entity key, serializing ENTITY_ENTRY_PREFIX + + * entitytype + revstarttime + entity + RELATED_COLUMN + relatedentitytype + + * relatedentity. + */ + private static byte[] createReleatedEntityKey(String entity, + String entitytype, byte[] revStartTime, String relatedEntity, + String relatedEntityType) throws IOException { + return KeyBuilder.newInstance().add(ENTITY_ENTRY_PREFIX).add(entitytype) + .add(revStartTime).add(entity).add(RELATED_COLUMN) + .add(relatedEntityType).add(relatedEntity).getBytes(); + } + + /** + * Parses the related entity from the given key at the given offset and + * adds it to the given entity. 
+ */
+ private static void addRelatedEntity(TimelineEntity entity, byte[] key,
+     int offset) throws IOException {
+   KeyParser kp = new KeyParser(key, offset);
+   String type = kp.getNextString();
+   String id = kp.getNextString();
+   entity.addRelatedEntity(type, id);
+ }
+
+ /**
+  * Clears the cache to test reloading start times from leveldb (only for
+  * testing).
+  */
+ @VisibleForTesting
+ void clearStartTimeCache() {
+   startTimeCache.clear();
+ }
+}
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/MemoryTimelineStore.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/MemoryTimelineStore.java
new file mode 100644
index 0000000..6b761d7
--- /dev/null
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/MemoryTimelineStore.java
@@ -0,0 +1,306 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.PriorityQueue;
+import java.util.Set;
+import java.util.SortedSet;
+
+import org.apache.hadoop.classification.InterfaceAudience.Private;
+import org.apache.hadoop.classification.InterfaceStability.Unstable;
+import org.apache.hadoop.service.AbstractService;
+import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities;
+import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
+import org.apache.hadoop.yarn.api.records.timeline.TimelineEvent;
+import org.apache.hadoop.yarn.api.records.timeline.TimelineEvents;
+import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse;
+import org.apache.hadoop.yarn.api.records.timeline.TimelineEvents.EventsOfOneEntity;
+import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse.TimelinePutError;
+
+/**
+ * In-memory implementation of {@link TimelineStore}. This implementation is
+ * intended for testing only; because all state is kept in the memory of a
+ * single instance, creating more than one instance can result in reads and
+ * writes going to different stores.
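+ * <p>
+ * A minimal usage sketch for tests (illustrative; assumes a
+ * {@code TimelineEntities} instance named {@code entities}):
+ * <pre>
+ * TimelineStore store = new MemoryTimelineStore();
+ * store.init(new YarnConfiguration());
+ * store.start();
+ * TimelinePutResponse response = store.put(entities);
+ * </pre>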
+ * + */ +@Private +@Unstable +public class MemoryTimelineStore + extends AbstractService implements TimelineStore { + + private Map entities = + new HashMap(); + + public MemoryTimelineStore() { + super(MemoryTimelineStore.class.getName()); + } + + @Override + public TimelineEntities getEntities(String entityType, Long limit, + Long windowStart, Long windowEnd, NameValuePair primaryFilter, + Collection secondaryFilters, EnumSet fields) { + if (limit == null) { + limit = DEFAULT_LIMIT; + } + if (windowStart == null) { + windowStart = Long.MIN_VALUE; + } + if (windowEnd == null) { + windowEnd = Long.MAX_VALUE; + } + if (fields == null) { + fields = EnumSet.allOf(Field.class); + } + List entitiesSelected = new ArrayList(); + for (TimelineEntity entity : new PriorityQueue(entities.values())) { + if (entitiesSelected.size() >= limit) { + break; + } + if (!entity.getEntityType().equals(entityType)) { + continue; + } + if (entity.getStartTime() <= windowStart) { + continue; + } + if (entity.getStartTime() > windowEnd) { + continue; + } + if (primaryFilter != null && + !matchPrimaryFilter(entity.getPrimaryFilters(), primaryFilter)) { + continue; + } + if (secondaryFilters != null) { // OR logic + boolean flag = false; + for (NameValuePair secondaryFilter : secondaryFilters) { + if (secondaryFilter != null && + matchFilter(entity.getOtherInfo(), secondaryFilter)) { + flag = true; + break; + } + } + if (!flag) { + continue; + } + } + entitiesSelected.add(entity); + } + List entitiesToReturn = new ArrayList(); + for (TimelineEntity entitySelected : entitiesSelected) { + entitiesToReturn.add(maskFields(entitySelected, fields)); + } + Collections.sort(entitiesToReturn); + TimelineEntities entitiesWrapper = new TimelineEntities(); + entitiesWrapper.setEntities(entitiesToReturn); + return entitiesWrapper; + } + + @Override + public TimelineEntity getEntity(String entityId, String entityType, + EnumSet fieldsToRetrieve) { + if (fieldsToRetrieve == null) { + fieldsToRetrieve = EnumSet.allOf(Field.class); + } + TimelineEntity entity = entities.get(new EntityIdentifier(entityId, entityType)); + if (entity == null) { + return null; + } else { + return maskFields(entity, fieldsToRetrieve); + } + } + + @Override + public TimelineEvents getEntityTimelines(String entityType, + SortedSet entityIds, Long limit, Long windowStart, + Long windowEnd, + Set eventTypes) { + TimelineEvents allEvents = new TimelineEvents(); + if (entityIds == null) { + return allEvents; + } + if (limit == null) { + limit = DEFAULT_LIMIT; + } + if (windowStart == null) { + windowStart = Long.MIN_VALUE; + } + if (windowEnd == null) { + windowEnd = Long.MAX_VALUE; + } + for (String entityId : entityIds) { + EntityIdentifier entityID = new EntityIdentifier(entityId, entityType); + TimelineEntity entity = entities.get(entityID); + if (entity == null) { + continue; + } + EventsOfOneEntity events = new EventsOfOneEntity(); + events.setEntityId(entityId); + events.setEntityType(entityType); + for (TimelineEvent event : entity.getEvents()) { + if (events.getEvents().size() >= limit) { + break; + } + if (event.getTimestamp() <= windowStart) { + continue; + } + if (event.getTimestamp() > windowEnd) { + continue; + } + if (eventTypes != null && !eventTypes.contains(event.getEventType())) { + continue; + } + events.addEvent(event); + } + allEvents.addEvent(events); + } + return allEvents; + } + + @Override + public TimelinePutResponse put(TimelineEntities data) { + TimelinePutResponse response = new TimelinePutResponse(); + for (TimelineEntity entity 
: data.getEntities()) { + EntityIdentifier entityId = + new EntityIdentifier(entity.getEntityId(), entity.getEntityType()); + // store entity info in memory + TimelineEntity existingEntity = entities.get(entityId); + if (existingEntity == null) { + existingEntity = new TimelineEntity(); + existingEntity.setEntityId(entity.getEntityId()); + existingEntity.setEntityType(entity.getEntityType()); + existingEntity.setStartTime(entity.getStartTime()); + entities.put(entityId, existingEntity); + } + if (entity.getEvents() != null) { + if (existingEntity.getEvents() == null) { + existingEntity.setEvents(entity.getEvents()); + } else { + existingEntity.addEvents(entity.getEvents()); + } + Collections.sort(existingEntity.getEvents()); + } + // check startTime + if (existingEntity.getStartTime() == null) { + if (existingEntity.getEvents() == null + || existingEntity.getEvents().isEmpty()) { + TimelinePutError error = new TimelinePutError(); + error.setEntityId(entityId.getId()); + error.setEntityType(entityId.getType()); + error.setErrorCode(TimelinePutError.NO_START_TIME); + response.addError(error); + entities.remove(entityId); + continue; + } else { + existingEntity.setStartTime(entity.getEvents().get(0).getTimestamp()); + } + } + if (entity.getPrimaryFilters() != null) { + if (existingEntity.getPrimaryFilters() == null) { + existingEntity.setPrimaryFilters(entity.getPrimaryFilters()); + } else { + existingEntity.addPrimaryFilters(entity.getPrimaryFilters()); + } + } + if (entity.getOtherInfo() != null) { + if (existingEntity.getOtherInfo() == null) { + existingEntity.setOtherInfo(entity.getOtherInfo()); + } else { + existingEntity.addOtherInfo(entity.getOtherInfo()); + } + } + // relate it to other entities + if (entity.getRelatedEntities() == null) { + continue; + } + for (Map.Entry> partRelatedEntities : entity + .getRelatedEntities().entrySet()) { + if (partRelatedEntities == null) { + continue; + } + for (String idStr : partRelatedEntities.getValue()) { + EntityIdentifier relatedEntityId = + new EntityIdentifier(idStr, partRelatedEntities.getKey()); + TimelineEntity relatedEntity = entities.get(relatedEntityId); + if (relatedEntity != null) { + relatedEntity.addRelatedEntity( + existingEntity.getEntityType(), existingEntity.getEntityId()); + } else { + relatedEntity = new TimelineEntity(); + relatedEntity.setEntityId(relatedEntityId.getId()); + relatedEntity.setEntityType(relatedEntityId.getType()); + relatedEntity.setStartTime(existingEntity.getStartTime()); + relatedEntity.addRelatedEntity(existingEntity.getEntityType(), + existingEntity.getEntityId()); + entities.put(relatedEntityId, relatedEntity); + } + } + } + } + return response; + } + + private static TimelineEntity maskFields( + TimelineEntity entity, EnumSet fields) { + // Conceal the fields that are not going to be exposed + TimelineEntity entityToReturn = new TimelineEntity(); + entityToReturn.setEntityId(entity.getEntityId()); + entityToReturn.setEntityType(entity.getEntityType()); + entityToReturn.setStartTime(entity.getStartTime()); + entityToReturn.setEvents(fields.contains(Field.EVENTS) ? + entity.getEvents() : fields.contains(Field.LAST_EVENT_ONLY) ? + Arrays.asList(entity.getEvents().get(0)) : null); + entityToReturn.setRelatedEntities(fields.contains(Field.RELATED_ENTITIES) ? + entity.getRelatedEntities() : null); + entityToReturn.setPrimaryFilters(fields.contains(Field.PRIMARY_FILTERS) ? + entity.getPrimaryFilters() : null); + entityToReturn.setOtherInfo(fields.contains(Field.OTHER_INFO) ? 
+ entity.getOtherInfo() : null); + return entityToReturn; + } + + private static boolean matchFilter(Map tags, + NameValuePair filter) { + Object value = tags.get(filter.getName()); + if (value == null) { // doesn't have the filter + return false; + } else if (!value.equals(filter.getValue())) { // doesn't match the filter + return false; + } + return true; + } + + private static boolean matchPrimaryFilter(Map> tags, + NameValuePair filter) { + Set value = tags.get(filter.getName()); + if (value == null) { // doesn't have the filter + return false; + } else { + return value.contains(filter.getValue()); + } + } + +} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/NameValuePair.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/NameValuePair.java new file mode 100644 index 0000000..d8dabd2 --- /dev/null +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/NameValuePair.java @@ -0,0 +1,59 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.classification.InterfaceStability; + +/** + * A class holding a name and value pair, used for specifying filters in + * {@link TimelineReader}. + */ +@InterfaceAudience.Private +@InterfaceStability.Unstable +public class NameValuePair { + String name; + Object value; + + public NameValuePair(String name, Object value) { + this.name = name; + this.value = value; + } + + /** + * Get the name. + * @return The name. + */ + public String getName() { + + return name; + } + + /** + * Get the value. + * @return The value. 
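+   *         For example (illustrative), this returns {@code "alice"} for a
+   *         pair constructed as {@code new NameValuePair("user", "alice")}.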
+ */
+ public Object getValue() {
+   return value;
+ }
+
+ @Override
+ public String toString() {
+   return "{ name: " + name + ", value: " + value + " }";
+ }
+}
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TimelineReader.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TimelineReader.java
new file mode 100644
index 0000000..f0be2df
--- /dev/null
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TimelineReader.java
@@ -0,0 +1,141 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline;
+
+import java.io.IOException;
+import java.util.Collection;
+import java.util.EnumSet;
+import java.util.Set;
+import java.util.SortedSet;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities;
+import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
+import org.apache.hadoop.yarn.api.records.timeline.TimelineEvents;
+
+/**
+ * This interface is for retrieving timeline information.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Unstable
+public interface TimelineReader {
+
+  /**
+   * Possible fields to retrieve for {@link #getEntities} and
+   * {@link #getEntity}.
+   */
+  enum Field {
+    EVENTS,
+    RELATED_ENTITIES,
+    PRIMARY_FILTERS,
+    OTHER_INFO,
+    LAST_EVENT_ONLY
+  }
+
+  /**
+   * Default limit for {@link #getEntities} and {@link #getEntityTimelines}.
+   */
+  final long DEFAULT_LIMIT = 100;
+
+  /**
+   * This method retrieves a list of entity information,
+   * {@link TimelineEntity}, sorted by the starting timestamp for the entity,
+   * descending.
+   *
+   * @param entityType
+   *          The type of entities to return (required).
+   * @param limit
+   *          A limit on the number of entities to return. If null, defaults to
+   *          {@link #DEFAULT_LIMIT}.
+   * @param windowStart
+   *          The earliest start timestamp to retrieve (exclusive). If null,
+   *          defaults to retrieving all entities until the limit is reached.
+   * @param windowEnd
+   *          The latest start timestamp to retrieve (inclusive). If null,
+   *          defaults to {@link Long#MAX_VALUE}.
+   * @param primaryFilter
+   *          Retrieves only entities that have the specified primary filter.
+   *          If null, retrieves all entities. This is an indexed retrieval,
+   *          and no entities that do not match the filter are scanned.
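+   *          For example (illustrative), a primary filter constructed as
+   *          {@code new NameValuePair("user", "alice")} retrieves only
+   *          entities indexed under that name-value pair.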
+   * @param secondaryFilters
+   *          Retrieves only entities that have exact matches for all the
+   *          specified filters in their primary filters or other info. This is
+   *          not an indexed retrieval, so all entities are scanned but only
+   *          those matching the filters are returned.
+   * @param fieldsToRetrieve
+   *          Specifies which fields of the entity object to retrieve (see
+   *          {@link Field}). If the set of fields contains
+   *          {@link Field#LAST_EVENT_ONLY} and not {@link Field#EVENTS}, the
+   *          most recent event for each entity is retrieved. If null,
+   *          retrieves all fields.
+   * @return A {@link TimelineEntities} object.
+   * @throws IOException
+   */
+  TimelineEntities getEntities(String entityType,
+      Long limit, Long windowStart, Long windowEnd,
+      NameValuePair primaryFilter, Collection<NameValuePair> secondaryFilters,
+      EnumSet<Field> fieldsToRetrieve) throws IOException;
+
+  /**
+   * This method retrieves the entity information for a given entity.
+   *
+   * @param entityId
+   *          The entity whose information will be retrieved.
+   * @param entityType
+   *          The type of the entity.
+   * @param fieldsToRetrieve
+   *          Specifies which fields of the entity object to retrieve (see
+   *          {@link Field}). If the set of fields contains
+   *          {@link Field#LAST_EVENT_ONLY} and not {@link Field#EVENTS}, the
+   *          most recent event for each entity is retrieved. If null,
+   *          retrieves all fields.
+   * @return A {@link TimelineEntity} object.
+   * @throws IOException
+   */
+  TimelineEntity getEntity(String entityId, String entityType,
+      EnumSet<Field> fieldsToRetrieve) throws IOException;
+
+  /**
+   * This method retrieves the events for a list of entities all of the same
+   * entity type. The events for each entity are sorted in order of their
+   * timestamps, descending.
+   *
+   * @param entityType
+   *          The type of entities to retrieve events for.
+   * @param entityIds
+   *          The entity IDs to retrieve events for.
+   * @param limit
+   *          A limit on the number of events to return for each entity. If
+   *          null, defaults to {@link #DEFAULT_LIMIT} events per entity.
+   * @param windowStart
+   *          If not null, retrieves only events later than the given time
+   *          (exclusive).
+   * @param windowEnd
+   *          If not null, retrieves only events earlier than the given time
+   *          (inclusive).
+   * @param eventTypes
+   *          Restricts the events returned to the given types. If null, events
+   *          of all types will be returned.
+   * @return A {@link TimelineEvents} object.
+   * @throws IOException
+   */
+  TimelineEvents getEntityTimelines(String entityType,
+      SortedSet<String> entityIds, Long limit, Long windowStart,
+      Long windowEnd, Set<String> eventTypes) throws IOException;
+}
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TimelineStore.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TimelineStore.java
new file mode 100644
index 0000000..6b50d83
--- /dev/null
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TimelineStore.java
@@ -0,0 +1,29 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.
The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.service.Service;
+
+@InterfaceAudience.Private
+@InterfaceStability.Unstable
+public interface TimelineStore extends
+    Service, TimelineReader, TimelineWriter {
+}
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TimelineWriter.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TimelineWriter.java
new file mode 100644
index 0000000..8f28d82
--- /dev/null
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TimelineWriter.java
@@ -0,0 +1,46 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities;
+import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse;
+
+import java.io.IOException;
+
+/**
+ * This interface is for storing timeline information.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Unstable
+public interface TimelineWriter {
+
+  /**
+   * Stores entity information to the timeline store. Any errors occurring for
+   * individual put request objects will be reported in the response.
+   *
+   * @param data
+   *          A {@link TimelineEntities} object.
+   * @return A {@link TimelinePutResponse} object.
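+   *         Errors for individual entities are reported as
+   *         {@link TimelinePutResponse.TimelinePutError} entries in the
+   *         response rather than thrown as exceptions.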
+ * @throws IOException + */ + TimelinePutResponse put(TimelineEntities data) throws IOException; + +} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/package-info.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/package-info.java new file mode 100644 index 0000000..970e868 --- /dev/null +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/package-info.java @@ -0,0 +1,20 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +@InterfaceAudience.Private +package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline; +import org.apache.hadoop.classification.InterfaceAudience; diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSWebApp.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSWebApp.java index d2cfc32..93065b3 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSWebApp.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSWebApp.java @@ -21,7 +21,7 @@ import org.apache.hadoop.yarn.server.api.ApplicationContext; import org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryManager; -import org.apache.hadoop.yarn.server.applicationhistoryservice.apptimeline.ApplicationTimelineStore; +import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TimelineStore; import org.apache.hadoop.yarn.webapp.GenericExceptionHandler; import org.apache.hadoop.yarn.webapp.WebApp; import org.apache.hadoop.yarn.webapp.YarnJacksonJaxbJsonProvider; @@ -30,22 +30,22 @@ public class AHSWebApp extends WebApp implements YarnWebParams { private final ApplicationHistoryManager applicationHistoryManager; - private final ApplicationTimelineStore applicationTimelineStore; + private final TimelineStore timelineStore; public AHSWebApp(ApplicationHistoryManager applicationHistoryManager, - ApplicationTimelineStore applicationTimelineStore) { + TimelineStore timelineStore) { this.applicationHistoryManager = 
applicationHistoryManager; - this.applicationTimelineStore = applicationTimelineStore; + this.timelineStore = timelineStore; } @Override public void setup() { bind(YarnJacksonJaxbJsonProvider.class); bind(AHSWebServices.class); - bind(ATSWebServices.class); + bind(TimelineWebServices.class); bind(GenericExceptionHandler.class); bind(ApplicationContext.class).toInstance(applicationHistoryManager); - bind(ApplicationTimelineStore.class).toInstance(applicationTimelineStore); + bind(TimelineStore.class).toInstance(timelineStore); route("/", AHSController.class); route(pajoin("/apps", APP_STATE), AHSController.class); route(pajoin("/app", APPLICATION_ID), AHSController.class, "app"); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/ATSWebServices.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/ATSWebServices.java deleted file mode 100644 index baf00d6..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/ATSWebServices.java +++ /dev/null @@ -1,328 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
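The AHSWebApp hunk above completes the wiring: the web layer now binds TimelineStore and TimelineWebServices in place of the ATS-prefixed classes. For orientation, here is a minimal sketch that drives the renamed store API directly, mirroring the calls TimelineWebServices and the tests later in this patch make. It assumes the renamed records (TimelineEntity, TimelineEvent, TimelineEntities, TimelinePutResponse) keep the same accessors as the ATS* classes this patch removes:

import java.util.Collections;
import java.util.EnumSet;

import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEvent;
import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.MemoryTimelineStore;
import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TimelineReader.Field;
import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TimelineStore;

public class TimelineStoreSketch {
  public static void main(String[] args) throws Exception {
    // TimelineStore extends Service, so it follows the init/start/stop lifecycle.
    TimelineStore store = new MemoryTimelineStore();
    store.init(new YarnConfiguration());
    store.start();
    try {
      // One entity with a single event, shaped like the fixtures in
      // TimelineStoreTestUtils later in this patch.
      TimelineEvent event = new TimelineEvent();
      event.setEventType("start_event");
      event.setTimestamp(System.currentTimeMillis());
      TimelineEntity entity = new TimelineEntity();
      entity.setEntityId("id_1");
      entity.setEntityType("type_1");
      entity.setStartTime(event.getTimestamp());
      entity.setEvents(Collections.singletonList(event));
      TimelineEntities entities = new TimelineEntities();
      entities.setEntities(Collections.singletonList(entity));

      // put(...) reports per-entity problems in the response instead of throwing.
      TimelinePutResponse response = store.put(entities);
      System.out.println("put errors: " + response.getErrors().size());

      // Read it back the same way TimelineWebServices.getEntity(...) does.
      TimelineEntity stored =
          store.getEntity("id_1", "type_1", EnumSet.allOf(Field.class));
      System.out.println(stored.getEntityId() + "/" + stored.getEntityType());
    } finally {
      store.stop();
    }
  }
}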
- */ - -package org.apache.hadoop.yarn.server.applicationhistoryservice.webapp; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collection; -import java.util.EnumSet; -import java.util.HashSet; -import java.util.List; -import java.util.Set; -import java.util.SortedSet; -import java.util.TreeSet; - -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; -import javax.ws.rs.Consumes; -import javax.ws.rs.GET; -import javax.ws.rs.POST; -import javax.ws.rs.Path; -import javax.ws.rs.PathParam; -import javax.ws.rs.Produces; -import javax.ws.rs.QueryParam; -import javax.ws.rs.WebApplicationException; -import javax.ws.rs.core.Context; -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.Response; -import javax.xml.bind.annotation.XmlAccessType; -import javax.xml.bind.annotation.XmlAccessorType; -import javax.xml.bind.annotation.XmlElement; -import javax.xml.bind.annotation.XmlRootElement; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.classification.InterfaceAudience.Public; -import org.apache.hadoop.classification.InterfaceStability.Unstable; -import org.apache.hadoop.yarn.api.records.apptimeline.ATSEntities; -import org.apache.hadoop.yarn.api.records.apptimeline.ATSEntity; -import org.apache.hadoop.yarn.api.records.apptimeline.ATSEvents; -import org.apache.hadoop.yarn.api.records.apptimeline.ATSPutErrors; -import org.apache.hadoop.yarn.server.applicationhistoryservice.apptimeline.ApplicationTimelineReader.Field; -import org.apache.hadoop.yarn.server.applicationhistoryservice.apptimeline.ApplicationTimelineStore; -import org.apache.hadoop.yarn.server.applicationhistoryservice.apptimeline.NameValuePair; -import org.apache.hadoop.yarn.webapp.BadRequestException; - -import com.google.inject.Inject; -import com.google.inject.Singleton; - -@Singleton -@Path("/ws/v1/apptimeline") -//TODO: support XML serialization/deserialization -public class ATSWebServices { - - private static final Log LOG = LogFactory.getLog(ATSWebServices.class); - - private ApplicationTimelineStore store; - - @Inject - public ATSWebServices(ApplicationTimelineStore store) { - this.store = store; - } - - @XmlRootElement(name = "about") - @XmlAccessorType(XmlAccessType.NONE) - @Public - @Unstable - public static class AboutInfo { - - private String about; - - public AboutInfo() { - - } - - public AboutInfo(String about) { - this.about = about; - } - - @XmlElement(name = "About") - public String getAbout() { - return about; - } - - public void setAbout(String about) { - this.about = about; - } - - } - - /** - * Return the description of the application timeline web services. - */ - @GET - @Produces({ MediaType.APPLICATION_JSON /* , MediaType.APPLICATION_XML */}) - public AboutInfo about( - @Context HttpServletRequest req, - @Context HttpServletResponse res) { - init(res); - return new AboutInfo("Application Timeline API"); - } - - /** - * Return a list of entities that match the given parameters. 
- */ - @GET - @Path("/{entityType}") - @Produces({ MediaType.APPLICATION_JSON /* , MediaType.APPLICATION_XML */}) - public ATSEntities getEntities( - @Context HttpServletRequest req, - @Context HttpServletResponse res, - @PathParam("entityType") String entityType, - @QueryParam("primaryFilter") String primaryFilter, - @QueryParam("secondaryFilter") String secondaryFilter, - @QueryParam("windowStart") String windowStart, - @QueryParam("windowEnd") String windowEnd, - @QueryParam("limit") String limit, - @QueryParam("fields") String fields) { - init(res); - ATSEntities entities = null; - try { - entities = store.getEntities( - parseStr(entityType), - parseLongStr(limit), - parseLongStr(windowStart), - parseLongStr(windowEnd), - parsePairStr(primaryFilter, ":"), - parsePairsStr(secondaryFilter, ",", ":"), - parseFieldsStr(fields, ",")); - } catch (NumberFormatException e) { - throw new BadRequestException( - "windowStart, windowEnd or limit is not a numeric value."); - } catch (IllegalArgumentException e) { - throw new BadRequestException("requested invalid field."); - } catch (IOException e) { - LOG.error("Error getting entities", e); - throw new WebApplicationException(e, - Response.Status.INTERNAL_SERVER_ERROR); - } - if (entities == null) { - return new ATSEntities(); - } - return entities; - } - - /** - * Return a single entity of the given entity type and Id. - */ - @GET - @Path("/{entityType}/{entityId}") - @Produces({ MediaType.APPLICATION_JSON /* , MediaType.APPLICATION_XML */}) - public ATSEntity getEntity( - @Context HttpServletRequest req, - @Context HttpServletResponse res, - @PathParam("entityType") String entityType, - @PathParam("entityId") String entityId, - @QueryParam("fields") String fields) { - init(res); - ATSEntity entity = null; - try { - entity = - store.getEntity(parseStr(entityId), parseStr(entityType), - parseFieldsStr(fields, ",")); - } catch (IllegalArgumentException e) { - throw new BadRequestException( - "requested invalid field."); - } catch (IOException e) { - LOG.error("Error getting entity", e); - throw new WebApplicationException(e, - Response.Status.INTERNAL_SERVER_ERROR); - } - if (entity == null) { - throw new WebApplicationException(Response.Status.NOT_FOUND); - } - return entity; - } - - /** - * Return the events that match the given parameters. - */ - @GET - @Path("/{entityType}/events") - @Produces({ MediaType.APPLICATION_JSON /* , MediaType.APPLICATION_XML */}) - public ATSEvents getEvents( - @Context HttpServletRequest req, - @Context HttpServletResponse res, - @PathParam("entityType") String entityType, - @QueryParam("entityId") String entityId, - @QueryParam("eventType") String eventType, - @QueryParam("windowStart") String windowStart, - @QueryParam("windowEnd") String windowEnd, - @QueryParam("limit") String limit) { - init(res); - ATSEvents events = null; - try { - events = store.getEntityTimelines( - parseStr(entityType), - parseArrayStr(entityId, ","), - parseLongStr(limit), - parseLongStr(windowStart), - parseLongStr(windowEnd), - parseArrayStr(eventType, ",")); - } catch (NumberFormatException e) { - throw new BadRequestException( - "windowStart, windowEnd or limit is not a numeric value."); - } catch (IOException e) { - LOG.error("Error getting entity timelines", e); - throw new WebApplicationException(e, - Response.Status.INTERNAL_SERVER_ERROR); - } - if (events == null) { - return new ATSEvents(); - } - return events; - } - - /** - * Store the given entities into the timeline store, and return the errors - * that happen during storing. 
- */ - @POST - @Consumes({ MediaType.APPLICATION_JSON /* , MediaType.APPLICATION_XML */}) - public ATSPutErrors postEntities( - @Context HttpServletRequest req, - @Context HttpServletResponse res, - ATSEntities entities) { - init(res); - if (entities == null) { - return new ATSPutErrors(); - } - try { - return store.put(entities); - } catch (IOException e) { - LOG.error("Error putting entities", e); - throw new WebApplicationException(e, - Response.Status.INTERNAL_SERVER_ERROR); - } - } - - private void init(HttpServletResponse response) { - response.setContentType(null); - } - - private static SortedSet parseArrayStr(String str, String delimiter) { - if (str == null) { - return null; - } - SortedSet strSet = new TreeSet(); - String[] strs = str.split(delimiter); - for (String aStr : strs) { - strSet.add(aStr.trim()); - } - return strSet; - } - - private static NameValuePair parsePairStr(String str, String delimiter) { - if (str == null) { - return null; - } - String[] strs = str.split(delimiter, 2); - return new NameValuePair(strs[0].trim(), strs[1].trim()); - } - - private static Collection parsePairsStr( - String str, String aDelimiter, String pDelimiter) { - if (str == null) { - return null; - } - String[] strs = str.split(aDelimiter); - Set pairs = new HashSet(); - for (String aStr : strs) { - pairs.add(parsePairStr(aStr, pDelimiter)); - } - return pairs; - } - - private static EnumSet parseFieldsStr(String str, String delimiter) { - if (str == null) { - return null; - } - String[] strs = str.split(delimiter); - List fieldList = new ArrayList(); - for (String s : strs) { - s = s.trim().toUpperCase(); - if (s.equals("EVENTS")) - fieldList.add(Field.EVENTS); - else if (s.equals("LASTEVENTONLY")) - fieldList.add(Field.LAST_EVENT_ONLY); - else if (s.equals("RELATEDENTITIES")) - fieldList.add(Field.RELATED_ENTITIES); - else if (s.equals("PRIMARYFILTERS")) - fieldList.add(Field.PRIMARY_FILTERS); - else if (s.equals("OTHERINFO")) - fieldList.add(Field.OTHER_INFO); - } - if (fieldList.size() == 0) - return null; - Field f1 = fieldList.remove(fieldList.size() - 1); - if (fieldList.size() == 0) - return EnumSet.of(f1); - else - return EnumSet.of(f1, fieldList.toArray(new Field[fieldList.size()])); - } - - private static Long parseLongStr(String str) { - return str == null ? null : Long.parseLong(str.trim()); - } - - private static String parseStr(String str) { - return str == null ? null : str.trim(); - } - -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TimelineWebServices.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TimelineWebServices.java new file mode 100644 index 0000000..7f722ad --- /dev/null +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TimelineWebServices.java @@ -0,0 +1,328 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.yarn.server.applicationhistoryservice.webapp; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.EnumSet; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.SortedSet; +import java.util.TreeSet; + +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; +import javax.ws.rs.Consumes; +import javax.ws.rs.GET; +import javax.ws.rs.POST; +import javax.ws.rs.Path; +import javax.ws.rs.PathParam; +import javax.ws.rs.Produces; +import javax.ws.rs.QueryParam; +import javax.ws.rs.WebApplicationException; +import javax.ws.rs.core.Context; +import javax.ws.rs.core.MediaType; +import javax.ws.rs.core.Response; +import javax.xml.bind.annotation.XmlAccessType; +import javax.xml.bind.annotation.XmlAccessorType; +import javax.xml.bind.annotation.XmlElement; +import javax.xml.bind.annotation.XmlRootElement; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.classification.InterfaceAudience.Public; +import org.apache.hadoop.classification.InterfaceStability.Unstable; +import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities; +import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity; +import org.apache.hadoop.yarn.api.records.timeline.TimelineEvents; +import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse; +import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TimelineStore; +import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.NameValuePair; +import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TimelineReader.Field; +import org.apache.hadoop.yarn.webapp.BadRequestException; + +import com.google.inject.Inject; +import com.google.inject.Singleton; + +@Singleton +@Path("/ws/v1/timeline") +//TODO: support XML serialization/deserialization +public class TimelineWebServices { + + private static final Log LOG = LogFactory.getLog(TimelineWebServices.class); + + private TimelineStore store; + + @Inject + public TimelineWebServices(TimelineStore store) { + this.store = store; + } + + @XmlRootElement(name = "about") + @XmlAccessorType(XmlAccessType.NONE) + @Public + @Unstable + public static class AboutInfo { + + private String about; + + public AboutInfo() { + + } + + public AboutInfo(String about) { + this.about = about; + } + + @XmlElement(name = "About") + public String getAbout() { + return about; + } + + public void setAbout(String about) { + this.about = about; + } + + } + + /** + * Return the description of the timeline web services. + */ + @GET + @Produces({ MediaType.APPLICATION_JSON /* , MediaType.APPLICATION_XML */}) + public AboutInfo about( + @Context HttpServletRequest req, + @Context HttpServletResponse res) { + init(res); + return new AboutInfo("Timeline API"); + } + + /** + * Return a list of entities that match the given parameters. 
+ */ + @GET + @Path("/{entityType}") + @Produces({ MediaType.APPLICATION_JSON /* , MediaType.APPLICATION_XML */}) + public TimelineEntities getEntities( + @Context HttpServletRequest req, + @Context HttpServletResponse res, + @PathParam("entityType") String entityType, + @QueryParam("primaryFilter") String primaryFilter, + @QueryParam("secondaryFilter") String secondaryFilter, + @QueryParam("windowStart") String windowStart, + @QueryParam("windowEnd") String windowEnd, + @QueryParam("limit") String limit, + @QueryParam("fields") String fields) { + init(res); + TimelineEntities entities = null; + try { + entities = store.getEntities( + parseStr(entityType), + parseLongStr(limit), + parseLongStr(windowStart), + parseLongStr(windowEnd), + parsePairStr(primaryFilter, ":"), + parsePairsStr(secondaryFilter, ",", ":"), + parseFieldsStr(fields, ",")); + } catch (NumberFormatException e) { + throw new BadRequestException( + "windowStart, windowEnd or limit is not a numeric value."); + } catch (IllegalArgumentException e) { + throw new BadRequestException("requested invalid field."); + } catch (IOException e) { + LOG.error("Error getting entities", e); + throw new WebApplicationException(e, + Response.Status.INTERNAL_SERVER_ERROR); + } + if (entities == null) { + return new TimelineEntities(); + } + return entities; + } + + /** + * Return a single entity of the given entity type and Id. + */ + @GET + @Path("/{entityType}/{entityId}") + @Produces({ MediaType.APPLICATION_JSON /* , MediaType.APPLICATION_XML */}) + public TimelineEntity getEntity( + @Context HttpServletRequest req, + @Context HttpServletResponse res, + @PathParam("entityType") String entityType, + @PathParam("entityId") String entityId, + @QueryParam("fields") String fields) { + init(res); + TimelineEntity entity = null; + try { + entity = + store.getEntity(parseStr(entityId), parseStr(entityType), + parseFieldsStr(fields, ",")); + } catch (IllegalArgumentException e) { + throw new BadRequestException( + "requested invalid field."); + } catch (IOException e) { + LOG.error("Error getting entity", e); + throw new WebApplicationException(e, + Response.Status.INTERNAL_SERVER_ERROR); + } + if (entity == null) { + throw new WebApplicationException(Response.Status.NOT_FOUND); + } + return entity; + } + + /** + * Return the events that match the given parameters. 
+ */ + @GET + @Path("/{entityType}/events") + @Produces({ MediaType.APPLICATION_JSON /* , MediaType.APPLICATION_XML */}) + public TimelineEvents getEvents( + @Context HttpServletRequest req, + @Context HttpServletResponse res, + @PathParam("entityType") String entityType, + @QueryParam("entityId") String entityId, + @QueryParam("eventType") String eventType, + @QueryParam("windowStart") String windowStart, + @QueryParam("windowEnd") String windowEnd, + @QueryParam("limit") String limit) { + init(res); + TimelineEvents events = null; + try { + events = store.getEntityTimelines( + parseStr(entityType), + parseArrayStr(entityId, ","), + parseLongStr(limit), + parseLongStr(windowStart), + parseLongStr(windowEnd), + parseArrayStr(eventType, ",")); + } catch (NumberFormatException e) { + throw new BadRequestException( + "windowStart, windowEnd or limit is not a numeric value."); + } catch (IOException e) { + LOG.error("Error getting entity timelines", e); + throw new WebApplicationException(e, + Response.Status.INTERNAL_SERVER_ERROR); + } + if (events == null) { + return new TimelineEvents(); + } + return events; + } + + /** + * Store the given entities into the timeline store, and return the errors + * that happen during storing. + */ + @POST + @Consumes({ MediaType.APPLICATION_JSON /* , MediaType.APPLICATION_XML */}) + public TimelinePutResponse postEntities( + @Context HttpServletRequest req, + @Context HttpServletResponse res, + TimelineEntities entities) { + init(res); + if (entities == null) { + return new TimelinePutResponse(); + } + try { + return store.put(entities); + } catch (IOException e) { + LOG.error("Error putting entities", e); + throw new WebApplicationException(e, + Response.Status.INTERNAL_SERVER_ERROR); + } + } + + private void init(HttpServletResponse response) { + response.setContentType(null); + } + + private static SortedSet<String> parseArrayStr(String str, String delimiter) { + if (str == null) { + return null; + } + SortedSet<String> strSet = new TreeSet<String>(); + String[] strs = str.split(delimiter); + for (String aStr : strs) { + strSet.add(aStr.trim()); + } + return strSet; + } + + private static NameValuePair parsePairStr(String str, String delimiter) { + if (str == null) { + return null; + } + String[] strs = str.split(delimiter, 2); + return new NameValuePair(strs[0].trim(), strs[1].trim()); + } + + private static Collection<NameValuePair> parsePairsStr( + String str, String aDelimiter, String pDelimiter) { + if (str == null) { + return null; + } + String[] strs = str.split(aDelimiter); + Set<NameValuePair> pairs = new HashSet<NameValuePair>(); + for (String aStr : strs) { + pairs.add(parsePairStr(aStr, pDelimiter)); + } + return pairs; + } + + private static EnumSet<Field> parseFieldsStr(String str, String delimiter) { + if (str == null) { + return null; + } + String[] strs = str.split(delimiter); + List<Field> fieldList = new ArrayList<Field>(); + for (String s : strs) { + s = s.trim().toUpperCase(); + if (s.equals("EVENTS")) + fieldList.add(Field.EVENTS); + else if (s.equals("LASTEVENTONLY")) + fieldList.add(Field.LAST_EVENT_ONLY); + else if (s.equals("RELATEDENTITIES")) + fieldList.add(Field.RELATED_ENTITIES); + else if (s.equals("PRIMARYFILTERS")) + fieldList.add(Field.PRIMARY_FILTERS); + else if (s.equals("OTHERINFO")) + fieldList.add(Field.OTHER_INFO); + } + if (fieldList.size() == 0) + return null; + Field f1 = fieldList.remove(fieldList.size() - 1); + if (fieldList.size() == 0) + return EnumSet.of(f1); + else + return EnumSet.of(f1, fieldList.toArray(new Field[fieldList.size()])); + } + + private static Long parseLongStr(String str) { +
return str == null ? null : Long.parseLong(str.trim()); + } + + private static String parseStr(String str) { + return str == null ? null : str.trim(); + } + +} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/ApplicationTimelineStoreTestUtils.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/ApplicationTimelineStoreTestUtils.java deleted file mode 100644 index eb3b06d..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/ApplicationTimelineStoreTestUtils.java +++ /dev/null @@ -1,563 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.yarn.server.applicationhistoryservice.apptimeline; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.EnumSet; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Set; -import java.util.SortedSet; -import java.util.TreeSet; - -import org.apache.hadoop.yarn.api.records.apptimeline.ATSEntities; -import org.apache.hadoop.yarn.api.records.apptimeline.ATSEntity; -import org.apache.hadoop.yarn.api.records.apptimeline.ATSEvent; -import org.apache.hadoop.yarn.api.records.apptimeline.ATSEvents.ATSEventsOfOneEntity; -import org.apache.hadoop.yarn.api.records.apptimeline.ATSPutErrors; -import org.apache.hadoop.yarn.api.records.apptimeline.ATSPutErrors.ATSPutError; -import org.apache.hadoop.yarn.server.applicationhistoryservice.apptimeline.ApplicationTimelineReader.Field; - -public class ApplicationTimelineStoreTestUtils { - - protected static final Map EMPTY_MAP = - Collections.emptyMap(); - protected static final Map> EMPTY_PRIMARY_FILTERS = - Collections.emptyMap(); - protected static final Map> EMPTY_REL_ENTITIES = - Collections.emptyMap(); - - protected ApplicationTimelineStore store; - protected String entity1; - protected String entityType1; - protected String entity1b; - protected String entity2; - protected String entityType2; - protected Map> primaryFilters; - protected Map secondaryFilters; - protected Map allFilters; - protected Map otherInfo; - protected Map> relEntityMap; - protected NameValuePair userFilter; - protected Collection goodTestingFilters; - 
protected Collection badTestingFilters; - protected ATSEvent ev1; - protected ATSEvent ev2; - protected ATSEvent ev3; - protected ATSEvent ev4; - protected Map eventInfo; - protected List events1; - protected List events2; - - /** - * Load test data into the given store - */ - protected void loadTestData() throws IOException { - ATSEntities atsEntities = new ATSEntities(); - Map> primaryFilters = - new HashMap>(); - Set l1 = new HashSet(); - l1.add("username"); - Set l2 = new HashSet(); - l2.add(12345l); - primaryFilters.put("user", l1); - primaryFilters.put("appname", l2); - Map secondaryFilters = new HashMap(); - secondaryFilters.put("startTime", 123456l); - secondaryFilters.put("status", "RUNNING"); - Map otherInfo1 = new HashMap(); - otherInfo1.put("info1", "val1"); - otherInfo1.putAll(secondaryFilters); - - String entity1 = "id_1"; - String entityType1 = "type_1"; - String entity1b = "id_2"; - String entity2 = "id_2"; - String entityType2 = "type_2"; - - Map> relatedEntities = - new HashMap>(); - relatedEntities.put(entityType2, Collections.singleton(entity2)); - - ATSEvent ev3 = createEvent(789l, "launch_event", null); - ATSEvent ev4 = createEvent(-123l, "init_event", null); - List events = new ArrayList(); - events.add(ev3); - events.add(ev4); - atsEntities.setEntities(Collections.singletonList(createEntity(entity2, - entityType2, null, events, null, null, null))); - ATSPutErrors response = store.put(atsEntities); - assertEquals(0, response.getErrors().size()); - - ATSEvent ev1 = createEvent(123l, "start_event", null); - atsEntities.setEntities(Collections.singletonList(createEntity(entity1, - entityType1, 123l, Collections.singletonList(ev1), - relatedEntities, primaryFilters, otherInfo1))); - response = store.put(atsEntities); - assertEquals(0, response.getErrors().size()); - atsEntities.setEntities(Collections.singletonList(createEntity(entity1b, - entityType1, null, Collections.singletonList(ev1), relatedEntities, - primaryFilters, otherInfo1))); - response = store.put(atsEntities); - assertEquals(0, response.getErrors().size()); - - Map eventInfo = new HashMap(); - eventInfo.put("event info 1", "val1"); - ATSEvent ev2 = createEvent(456l, "end_event", eventInfo); - Map otherInfo2 = new HashMap(); - otherInfo2.put("info2", "val2"); - atsEntities.setEntities(Collections.singletonList(createEntity(entity1, - entityType1, null, Collections.singletonList(ev2), null, - primaryFilters, otherInfo2))); - response = store.put(atsEntities); - assertEquals(0, response.getErrors().size()); - atsEntities.setEntities(Collections.singletonList(createEntity(entity1b, - entityType1, 789l, Collections.singletonList(ev2), null, - primaryFilters, otherInfo2))); - response = store.put(atsEntities); - assertEquals(0, response.getErrors().size()); - - atsEntities.setEntities(Collections.singletonList(createEntity( - "badentityid", "badentity", null, null, null, null, otherInfo1))); - response = store.put(atsEntities); - assertEquals(1, response.getErrors().size()); - ATSPutError error = response.getErrors().get(0); - assertEquals("badentityid", error.getEntityId()); - assertEquals("badentity", error.getEntityType()); - assertEquals(ATSPutError.NO_START_TIME, error.getErrorCode()); - } - - /** - * Load verification data - */ - protected void loadVerificationData() throws Exception { - userFilter = new NameValuePair("user", - "username"); - goodTestingFilters = new ArrayList(); - goodTestingFilters.add(new NameValuePair("appname", 12345l)); - goodTestingFilters.add(new NameValuePair("status", 
"RUNNING")); - badTestingFilters = new ArrayList(); - badTestingFilters.add(new NameValuePair("appname", 12345l)); - badTestingFilters.add(new NameValuePair("status", "FINISHED")); - - primaryFilters = new HashMap>(); - Set l1 = new HashSet(); - l1.add("username"); - Set l2 = new HashSet(); - l2.add(12345l); - primaryFilters.put("user", l1); - primaryFilters.put("appname", l2); - secondaryFilters = new HashMap(); - secondaryFilters.put("startTime", 123456l); - secondaryFilters.put("status", "RUNNING"); - allFilters = new HashMap(); - allFilters.putAll(secondaryFilters); - for (Entry> pf : primaryFilters.entrySet()) { - for (Object o : pf.getValue()) { - allFilters.put(pf.getKey(), o); - } - } - otherInfo = new HashMap(); - otherInfo.put("info1", "val1"); - otherInfo.put("info2", "val2"); - otherInfo.putAll(secondaryFilters); - - entity1 = "id_1"; - entityType1 = "type_1"; - entity1b = "id_2"; - entity2 = "id_2"; - entityType2 = "type_2"; - - ev1 = createEvent(123l, "start_event", null); - - eventInfo = new HashMap(); - eventInfo.put("event info 1", "val1"); - ev2 = createEvent(456l, "end_event", eventInfo); - events1 = new ArrayList(); - events1.add(ev2); - events1.add(ev1); - - relEntityMap = - new HashMap>(); - Set ids = new HashSet(); - ids.add(entity1); - ids.add(entity1b); - relEntityMap.put(entityType1, ids); - - ev3 = createEvent(789l, "launch_event", null); - ev4 = createEvent(-123l, "init_event", null); - events2 = new ArrayList(); - events2.add(ev3); - events2.add(ev4); - } - - public void testGetSingleEntity() throws IOException { - // test getting entity info - verifyEntityInfo(null, null, null, null, null, null, - store.getEntity("id_1", "type_2", EnumSet.allOf(Field.class))); - - verifyEntityInfo(entity1, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, store.getEntity(entity1, entityType1, - EnumSet.allOf(Field.class))); - - verifyEntityInfo(entity1b, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, store.getEntity(entity1b, entityType1, - EnumSet.allOf(Field.class))); - - verifyEntityInfo(entity2, entityType2, events2, relEntityMap, - EMPTY_PRIMARY_FILTERS, EMPTY_MAP, store.getEntity(entity2, entityType2, - EnumSet.allOf(Field.class))); - - // test getting single fields - verifyEntityInfo(entity1, entityType1, events1, null, null, null, - store.getEntity(entity1, entityType1, EnumSet.of(Field.EVENTS))); - - verifyEntityInfo(entity1, entityType1, Collections.singletonList(ev2), - null, null, null, store.getEntity(entity1, entityType1, - EnumSet.of(Field.LAST_EVENT_ONLY))); - - verifyEntityInfo(entity1b, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, store.getEntity(entity1b, entityType1, - null)); - - verifyEntityInfo(entity1, entityType1, null, null, primaryFilters, null, - store.getEntity(entity1, entityType1, - EnumSet.of(Field.PRIMARY_FILTERS))); - - verifyEntityInfo(entity1, entityType1, null, null, null, otherInfo, - store.getEntity(entity1, entityType1, EnumSet.of(Field.OTHER_INFO))); - - verifyEntityInfo(entity2, entityType2, null, relEntityMap, null, null, - store.getEntity(entity2, entityType2, - EnumSet.of(Field.RELATED_ENTITIES))); - } - - public void testGetEntities() throws IOException { - // test getting entities - assertEquals("nonzero entities size for nonexistent type", 0, - store.getEntities("type_0", null, null, null, null, null, - null).getEntities().size()); - assertEquals("nonzero entities size for nonexistent type", 0, - store.getEntities("type_3", null, null, null, null, null, - 
null).getEntities().size()); - assertEquals("nonzero entities size for nonexistent type", 0, - store.getEntities("type_0", null, null, null, userFilter, - null, null).getEntities().size()); - assertEquals("nonzero entities size for nonexistent type", 0, - store.getEntities("type_3", null, null, null, userFilter, - null, null).getEntities().size()); - - List entities = - store.getEntities("type_1", null, null, null, null, null, - EnumSet.allOf(Field.class)).getEntities(); - assertEquals(2, entities.size()); - verifyEntityInfo(entity1, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(0)); - verifyEntityInfo(entity1b, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(1)); - - entities = store.getEntities("type_2", null, null, null, null, null, - EnumSet.allOf(Field.class)).getEntities(); - assertEquals(1, entities.size()); - verifyEntityInfo(entity2, entityType2, events2, relEntityMap, - EMPTY_PRIMARY_FILTERS, EMPTY_MAP, entities.get(0)); - - entities = store.getEntities("type_1", 1l, null, null, null, null, - EnumSet.allOf(Field.class)).getEntities(); - assertEquals(1, entities.size()); - verifyEntityInfo(entity1, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(0)); - - entities = store.getEntities("type_1", 1l, 0l, null, null, null, - EnumSet.allOf(Field.class)).getEntities(); - assertEquals(1, entities.size()); - verifyEntityInfo(entity1, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(0)); - - entities = store.getEntities("type_1", null, 234l, null, null, null, - EnumSet.allOf(Field.class)).getEntities(); - assertEquals(0, entities.size()); - - entities = store.getEntities("type_1", null, 123l, null, null, null, - EnumSet.allOf(Field.class)).getEntities(); - assertEquals(0, entities.size()); - - entities = store.getEntities("type_1", null, 234l, 345l, null, null, - EnumSet.allOf(Field.class)).getEntities(); - assertEquals(0, entities.size()); - - entities = store.getEntities("type_1", null, null, 345l, null, null, - EnumSet.allOf(Field.class)).getEntities(); - assertEquals(2, entities.size()); - verifyEntityInfo(entity1, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(0)); - verifyEntityInfo(entity1b, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(1)); - - entities = store.getEntities("type_1", null, null, 123l, null, null, - EnumSet.allOf(Field.class)).getEntities(); - assertEquals(2, entities.size()); - verifyEntityInfo(entity1, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(0)); - verifyEntityInfo(entity1b, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(1)); - } - - public void testGetEntitiesWithPrimaryFilters() throws IOException { - // test using primary filter - assertEquals("nonzero entities size for primary filter", 0, - store.getEntities("type_1", null, null, null, - new NameValuePair("none", "none"), null, - EnumSet.allOf(Field.class)).getEntities().size()); - assertEquals("nonzero entities size for primary filter", 0, - store.getEntities("type_2", null, null, null, - new NameValuePair("none", "none"), null, - EnumSet.allOf(Field.class)).getEntities().size()); - assertEquals("nonzero entities size for primary filter", 0, - store.getEntities("type_3", null, null, null, - new NameValuePair("none", "none"), null, - EnumSet.allOf(Field.class)).getEntities().size()); - - List entities = 
store.getEntities("type_1", null, null, null, - userFilter, null, EnumSet.allOf(Field.class)).getEntities(); - assertEquals(2, entities.size()); - verifyEntityInfo(entity1, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(0)); - verifyEntityInfo(entity1b, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(1)); - - entities = store.getEntities("type_2", null, null, null, userFilter, null, - EnumSet.allOf(Field.class)).getEntities(); - assertEquals(0, entities.size()); - - entities = store.getEntities("type_1", 1l, null, null, userFilter, null, - EnumSet.allOf(Field.class)).getEntities(); - assertEquals(1, entities.size()); - verifyEntityInfo(entity1, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(0)); - - entities = store.getEntities("type_1", 1l, 0l, null, userFilter, null, - EnumSet.allOf(Field.class)).getEntities(); - assertEquals(1, entities.size()); - verifyEntityInfo(entity1, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(0)); - - entities = store.getEntities("type_1", null, 234l, null, userFilter, null, - EnumSet.allOf(Field.class)).getEntities(); - assertEquals(0, entities.size()); - - entities = store.getEntities("type_1", null, 234l, 345l, userFilter, null, - EnumSet.allOf(Field.class)).getEntities(); - assertEquals(0, entities.size()); - - entities = store.getEntities("type_1", null, null, 345l, userFilter, null, - EnumSet.allOf(Field.class)).getEntities(); - assertEquals(2, entities.size()); - verifyEntityInfo(entity1, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(0)); - verifyEntityInfo(entity1b, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(1)); - } - - public void testGetEntitiesWithSecondaryFilters() throws IOException { - // test using secondary filter - List entities = store.getEntities("type_1", null, null, null, - null, goodTestingFilters, EnumSet.allOf(Field.class)).getEntities(); - assertEquals(2, entities.size()); - verifyEntityInfo(entity1, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(0)); - verifyEntityInfo(entity1b, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(1)); - - entities = store.getEntities("type_1", null, null, null, userFilter, - goodTestingFilters, EnumSet.allOf(Field.class)).getEntities(); - assertEquals(2, entities.size()); - verifyEntityInfo(entity1, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(0)); - verifyEntityInfo(entity1b, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(1)); - - entities = store.getEntities("type_1", null, null, null, null, - badTestingFilters, EnumSet.allOf(Field.class)).getEntities(); - assertEquals(0, entities.size()); - - entities = store.getEntities("type_1", null, null, null, userFilter, - badTestingFilters, EnumSet.allOf(Field.class)).getEntities(); - assertEquals(0, entities.size()); - } - - public void testGetEvents() throws IOException { - // test getting entity timelines - SortedSet sortedSet = new TreeSet(); - sortedSet.add(entity1); - List timelines = - store.getEntityTimelines(entityType1, sortedSet, null, null, - null, null).getAllEvents(); - assertEquals(1, timelines.size()); - verifyEntityTimeline(timelines.get(0), entity1, entityType1, ev2, ev1); - - sortedSet.add(entity1b); - timelines = store.getEntityTimelines(entityType1, sortedSet, null, - 
null, null, null).getAllEvents(); - assertEquals(2, timelines.size()); - verifyEntityTimeline(timelines.get(0), entity1, entityType1, ev2, ev1); - verifyEntityTimeline(timelines.get(1), entity1b, entityType1, ev2, ev1); - - timelines = store.getEntityTimelines(entityType1, sortedSet, 1l, - null, null, null).getAllEvents(); - assertEquals(2, timelines.size()); - verifyEntityTimeline(timelines.get(0), entity1, entityType1, ev2); - verifyEntityTimeline(timelines.get(1), entity1b, entityType1, ev2); - - timelines = store.getEntityTimelines(entityType1, sortedSet, null, - 345l, null, null).getAllEvents(); - assertEquals(2, timelines.size()); - verifyEntityTimeline(timelines.get(0), entity1, entityType1, ev2); - verifyEntityTimeline(timelines.get(1), entity1b, entityType1, ev2); - - timelines = store.getEntityTimelines(entityType1, sortedSet, null, - 123l, null, null).getAllEvents(); - assertEquals(2, timelines.size()); - verifyEntityTimeline(timelines.get(0), entity1, entityType1, ev2); - verifyEntityTimeline(timelines.get(1), entity1b, entityType1, ev2); - - timelines = store.getEntityTimelines(entityType1, sortedSet, null, - null, 345l, null).getAllEvents(); - assertEquals(2, timelines.size()); - verifyEntityTimeline(timelines.get(0), entity1, entityType1, ev1); - verifyEntityTimeline(timelines.get(1), entity1b, entityType1, ev1); - - timelines = store.getEntityTimelines(entityType1, sortedSet, null, - null, 123l, null).getAllEvents(); - assertEquals(2, timelines.size()); - verifyEntityTimeline(timelines.get(0), entity1, entityType1, ev1); - verifyEntityTimeline(timelines.get(1), entity1b, entityType1, ev1); - - timelines = store.getEntityTimelines(entityType1, sortedSet, null, - null, null, Collections.singleton("end_event")).getAllEvents(); - assertEquals(2, timelines.size()); - verifyEntityTimeline(timelines.get(0), entity1, entityType1, ev2); - verifyEntityTimeline(timelines.get(1), entity1b, entityType1, ev2); - - sortedSet.add(entity2); - timelines = store.getEntityTimelines(entityType2, sortedSet, null, - null, null, null).getAllEvents(); - assertEquals(1, timelines.size()); - verifyEntityTimeline(timelines.get(0), entity2, entityType2, ev3, ev4); - } - - /** - * Verify a single entity - */ - protected static void verifyEntityInfo(String entity, String entityType, - List events, Map> relatedEntities, - Map> primaryFilters, Map otherInfo, - ATSEntity retrievedEntityInfo) { - if (entity == null) { - assertNull(retrievedEntityInfo); - return; - } - assertEquals(entity, retrievedEntityInfo.getEntityId()); - assertEquals(entityType, retrievedEntityInfo.getEntityType()); - if (events == null) { - assertNull(retrievedEntityInfo.getEvents()); - } else { - assertEquals(events, retrievedEntityInfo.getEvents()); - } - if (relatedEntities == null) { - assertNull(retrievedEntityInfo.getRelatedEntities()); - } else { - assertEquals(relatedEntities, retrievedEntityInfo.getRelatedEntities()); - } - if (primaryFilters == null) { - assertNull(retrievedEntityInfo.getPrimaryFilters()); - } else { - assertTrue(primaryFilters.equals( - retrievedEntityInfo.getPrimaryFilters())); - } - if (otherInfo == null) { - assertNull(retrievedEntityInfo.getOtherInfo()); - } else { - assertTrue(otherInfo.equals(retrievedEntityInfo.getOtherInfo())); - } - } - - /** - * Verify timeline events - */ - private static void verifyEntityTimeline( - ATSEventsOfOneEntity retrievedEvents, String entity, String entityType, - ATSEvent... 
actualEvents) { - assertEquals(entity, retrievedEvents.getEntityId()); - assertEquals(entityType, retrievedEvents.getEntityType()); - assertEquals(actualEvents.length, retrievedEvents.getEvents().size()); - for (int i = 0; i < actualEvents.length; i++) { - assertEquals(actualEvents[i], retrievedEvents.getEvents().get(i)); - } - } - - /** - * Create a test entity - */ - protected static ATSEntity createEntity(String entity, String entityType, - Long startTime, List events, - Map> relatedEntities, - Map> primaryFilters, - Map otherInfo) { - ATSEntity atsEntity = new ATSEntity(); - atsEntity.setEntityId(entity); - atsEntity.setEntityType(entityType); - atsEntity.setStartTime(startTime); - atsEntity.setEvents(events); - if (relatedEntities != null) { - for (Entry> e : relatedEntities.entrySet()) { - for (String v : e.getValue()) { - atsEntity.addRelatedEntity(e.getKey(), v); - } - } - } else { - atsEntity.setRelatedEntities(null); - } - atsEntity.setPrimaryFilters(primaryFilters); - atsEntity.setOtherInfo(otherInfo); - return atsEntity; - } - - /** - * Create a test event - */ - private static ATSEvent createEvent(long timestamp, String type, Map info) { - ATSEvent event = new ATSEvent(); - event.setTimestamp(timestamp); - event.setEventType(type); - event.setEventInfo(info); - return event; - } - -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/TestGenericObjectMapper.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/TestGenericObjectMapper.java deleted file mode 100644 index 4bb453a..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/TestGenericObjectMapper.java +++ /dev/null @@ -1,89 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.hadoop.yarn.server.applicationhistoryservice.apptimeline; - -import org.apache.hadoop.classification.InterfaceAudience; -import org.apache.hadoop.classification.InterfaceStability; -import org.apache.hadoop.io.WritableComparator; -import org.junit.Test; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import static org.junit.Assert.assertEquals; - -@InterfaceAudience.Private -@InterfaceStability.Unstable -public class TestGenericObjectMapper { - - @Test - public void testEncoding() { - testEncoding(Long.MAX_VALUE); - testEncoding(Long.MIN_VALUE); - testEncoding(0l); - testEncoding(128l); - testEncoding(256l); - testEncoding(512l); - testEncoding(-256l); - } - - private static void testEncoding(long l) { - byte[] b = GenericObjectMapper.writeReverseOrderedLong(l); - assertEquals("error decoding", l, - GenericObjectMapper.readReverseOrderedLong(b, 0)); - byte[] buf = new byte[16]; - System.arraycopy(b, 0, buf, 5, 8); - assertEquals("error decoding at offset", l, - GenericObjectMapper.readReverseOrderedLong(buf, 5)); - if (l > Long.MIN_VALUE) { - byte[] a = GenericObjectMapper.writeReverseOrderedLong(l-1); - assertEquals("error preserving ordering", 1, - WritableComparator.compareBytes(a, 0, a.length, b, 0, b.length)); - } - if (l < Long.MAX_VALUE) { - byte[] c = GenericObjectMapper.writeReverseOrderedLong(l+1); - assertEquals("error preserving ordering", 1, - WritableComparator.compareBytes(b, 0, b.length, c, 0, c.length)); - } - } - - private static void verify(Object o) throws IOException { - assertEquals(o, GenericObjectMapper.read(GenericObjectMapper.write(o))); - } - - @Test - public void testValueTypes() throws IOException { - verify(42l); - verify(42); - verify(1.23); - verify("abc"); - verify(true); - List list = new ArrayList(); - list.add("123"); - list.add("abc"); - verify(list); - Map map = new HashMap(); - map.put("k1","v1"); - map.put("k2","v2"); - verify(map); - } - -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/TestLeveldbApplicationTimelineStore.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/TestLeveldbApplicationTimelineStore.java deleted file mode 100644 index b868049..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/TestLeveldbApplicationTimelineStore.java +++ /dev/null @@ -1,95 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.yarn.server.applicationhistoryservice.apptimeline; - -import java.io.File; -import java.io.IOException; - -import org.apache.hadoop.classification.InterfaceAudience; -import org.apache.hadoop.classification.InterfaceStability; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.FileContext; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.yarn.api.records.apptimeline.ATSEntities; -import org.apache.hadoop.yarn.api.records.apptimeline.ATSEntity; -import org.apache.hadoop.yarn.api.records.apptimeline.ATSPutErrors; -import org.apache.hadoop.yarn.api.records.apptimeline.ATSPutErrors.ATSPutError; -import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import static org.junit.Assert.assertEquals; - -@InterfaceAudience.Private -@InterfaceStability.Unstable -public class TestLeveldbApplicationTimelineStore - extends ApplicationTimelineStoreTestUtils { - private FileContext fsContext; - private File fsPath; - - @Before - public void setup() throws Exception { - fsContext = FileContext.getLocalFSFileContext(); - Configuration conf = new Configuration(); - fsPath = new File("target", this.getClass().getSimpleName() + - "-tmpDir").getAbsoluteFile(); - fsContext.delete(new Path(fsPath.getAbsolutePath()), true); - conf.set(YarnConfiguration.ATS_LEVELDB_PATH_PROPERTY, - fsPath.getAbsolutePath()); - store = new LeveldbApplicationTimelineStore(); - store.init(conf); - store.start(); - loadTestData(); - loadVerificationData(); - } - - @After - public void tearDown() throws Exception { - store.stop(); - fsContext.delete(new Path(fsPath.getAbsolutePath()), true); - } - - @Test - public void testGetSingleEntity() throws IOException { - super.testGetSingleEntity(); - ((LeveldbApplicationTimelineStore)store).clearStartTimeCache(); - super.testGetSingleEntity(); - } - - @Test - public void testGetEntities() throws IOException { - super.testGetEntities(); - } - - @Test - public void testGetEntitiesWithPrimaryFilters() throws IOException { - super.testGetEntitiesWithPrimaryFilters(); - } - - @Test - public void testGetEntitiesWithSecondaryFilters() throws IOException { - super.testGetEntitiesWithSecondaryFilters(); - } - - @Test - public void testGetEvents() throws IOException { - super.testGetEvents(); - } - -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/TestMemoryApplicationTimelineStore.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/TestMemoryApplicationTimelineStore.java deleted file mode 100644 index 07a3955..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/TestMemoryApplicationTimelineStore.java +++ /dev/null @@ -1,74 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.yarn.server.applicationhistoryservice.apptimeline; - -import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import java.io.IOException; - -public class TestMemoryApplicationTimelineStore - extends ApplicationTimelineStoreTestUtils { - - @Before - public void setup() throws Exception { - store = new MemoryApplicationTimelineStore(); - store.init(new YarnConfiguration()); - store.start(); - loadTestData(); - loadVerificationData(); - } - - @After - public void tearDown() throws Exception { - store.stop(); - } - - public ApplicationTimelineStore getApplicationTimelineStore() { - return store; - } - - @Test - public void testGetSingleEntity() throws IOException { - super.testGetSingleEntity(); - } - - @Test - public void testGetEntities() throws IOException { - super.testGetEntities(); - } - - @Test - public void testGetEntitiesWithPrimaryFilters() throws IOException { - super.testGetEntitiesWithPrimaryFilters(); - } - - @Test - public void testGetEntitiesWithSecondaryFilters() throws IOException { - super.testGetEntitiesWithSecondaryFilters(); - } - - @Test - public void testGetEvents() throws IOException { - super.testGetEvents(); - } - -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TestGenericObjectMapper.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TestGenericObjectMapper.java new file mode 100644 index 0000000..676972b --- /dev/null +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TestGenericObjectMapper.java @@ -0,0 +1,90 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.classification.InterfaceStability; +import org.apache.hadoop.io.WritableComparator; +import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.GenericObjectMapper; +import org.junit.Test; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import static org.junit.Assert.assertEquals; + +@InterfaceAudience.Private +@InterfaceStability.Unstable +public class TestGenericObjectMapper { + + @Test + public void testEncoding() { + testEncoding(Long.MAX_VALUE); + testEncoding(Long.MIN_VALUE); + testEncoding(0l); + testEncoding(128l); + testEncoding(256l); + testEncoding(512l); + testEncoding(-256l); + } + + private static void testEncoding(long l) { + byte[] b = GenericObjectMapper.writeReverseOrderedLong(l); + assertEquals("error decoding", l, + GenericObjectMapper.readReverseOrderedLong(b, 0)); + byte[] buf = new byte[16]; + System.arraycopy(b, 0, buf, 5, 8); + assertEquals("error decoding at offset", l, + GenericObjectMapper.readReverseOrderedLong(buf, 5)); + if (l > Long.MIN_VALUE) { + byte[] a = GenericObjectMapper.writeReverseOrderedLong(l-1); + assertEquals("error preserving ordering", 1, + WritableComparator.compareBytes(a, 0, a.length, b, 0, b.length)); + } + if (l < Long.MAX_VALUE) { + byte[] c = GenericObjectMapper.writeReverseOrderedLong(l+1); + assertEquals("error preserving ordering", 1, + WritableComparator.compareBytes(b, 0, b.length, c, 0, c.length)); + } + } + + private static void verify(Object o) throws IOException { + assertEquals(o, GenericObjectMapper.read(GenericObjectMapper.write(o))); + } + + @Test + public void testValueTypes() throws IOException { + verify(42l); + verify(42); + verify(1.23); + verify("abc"); + verify(true); + List<String> list = new ArrayList<String>(); + list.add("123"); + list.add("abc"); + verify(list); + Map<String, String> map = new HashMap<String, String>(); + map.put("k1","v1"); + map.put("k2","v2"); + verify(map); + } + +} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TestLeveldbTimelineStore.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TestLeveldbTimelineStore.java new file mode 100644 index 0000000..206aee9 --- /dev/null +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TestLeveldbTimelineStore.java @@ -0,0 +1,89 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
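The ordering assertions in the new TestGenericObjectMapper above pin down the key property of writeReverseOrderedLong: a larger long encodes to lexicographically smaller bytes. A short sketch of that property in isolation; reading it as "newer timestamps come first in a forward scan" is an inference about the LevelDB key layout, not something this patch states:

import org.apache.hadoop.io.WritableComparator;
import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.GenericObjectMapper;

public class ReverseOrderSketch {
  public static void main(String[] args) {
    byte[] older = GenericObjectMapper.writeReverseOrderedLong(1000L);
    byte[] newer = GenericObjectMapper.writeReverseOrderedLong(2000L);
    // A positive result means the older timestamp's key sorts *after* the
    // newer one's, matching the test's "error preserving ordering" checks.
    int cmp = WritableComparator.compareBytes(
        older, 0, older.length, newer, 0, newer.length);
    System.out.println(cmp > 0); // true
    // And the encoding round-trips, as the "error decoding" assertions check.
    System.out.println(GenericObjectMapper.readReverseOrderedLong(newer, 0)); // 2000
  }
}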
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline; + +import java.io.File; +import java.io.IOException; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.classification.InterfaceStability; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FileContext; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.yarn.conf.YarnConfiguration; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +@InterfaceAudience.Private +@InterfaceStability.Unstable +public class TestLeveldbTimelineStore + extends TimelineStoreTestUtils { + private FileContext fsContext; + private File fsPath; + + @Before + public void setup() throws Exception { + fsContext = FileContext.getLocalFSFileContext(); + Configuration conf = new Configuration(); + fsPath = new File("target", this.getClass().getSimpleName() + + "-tmpDir").getAbsoluteFile(); + fsContext.delete(new Path(fsPath.getAbsolutePath()), true); + conf.set(YarnConfiguration.TIMELINE_SERVICE_LEVELDB_PATH, + fsPath.getAbsolutePath()); + store = new LeveldbTimelineStore(); + store.init(conf); + store.start(); + loadTestData(); + loadVerificationData(); + } + + @After + public void tearDown() throws Exception { + store.stop(); + fsContext.delete(new Path(fsPath.getAbsolutePath()), true); + } + + @Test + public void testGetSingleEntity() throws IOException { + super.testGetSingleEntity(); + ((LeveldbTimelineStore)store).clearStartTimeCache(); + super.testGetSingleEntity(); + } + + @Test + public void testGetEntities() throws IOException { + super.testGetEntities(); + } + + @Test + public void testGetEntitiesWithPrimaryFilters() throws IOException { + super.testGetEntitiesWithPrimaryFilters(); + } + + @Test + public void testGetEntitiesWithSecondaryFilters() throws IOException { + super.testGetEntitiesWithSecondaryFilters(); + } + + @Test + public void testGetEvents() throws IOException { + super.testGetEvents(); + } + +} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TestMemoryTimelineStore.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TestMemoryTimelineStore.java new file mode 100644 index 0000000..49ab53f --- /dev/null +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TestMemoryTimelineStore.java @@ -0,0 +1,76 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline; + +import org.apache.hadoop.yarn.conf.YarnConfiguration; +import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TimelineStore; +import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.MemoryTimelineStore; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import java.io.IOException; + +public class TestMemoryTimelineStore + extends TimelineStoreTestUtils { + + @Before + public void setup() throws Exception { + store = new MemoryTimelineStore(); + store.init(new YarnConfiguration()); + store.start(); + loadTestData(); + loadVerificationData(); + } + + @After + public void tearDown() throws Exception { + store.stop(); + } + + public TimelineStore getTimelineStore() { + return store; + } + + @Test + public void testGetSingleEntity() throws IOException { + super.testGetSingleEntity(); + } + + @Test + public void testGetEntities() throws IOException { + super.testGetEntities(); + } + + @Test + public void testGetEntitiesWithPrimaryFilters() throws IOException { + super.testGetEntitiesWithPrimaryFilters(); + } + + @Test + public void testGetEntitiesWithSecondaryFilters() throws IOException { + super.testGetEntitiesWithSecondaryFilters(); + } + + @Test + public void testGetEvents() throws IOException { + super.testGetEvents(); + } + +} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TimelineStoreTestUtils.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TimelineStoreTestUtils.java new file mode 100644 index 0000000..8645c34 --- /dev/null +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TimelineStoreTestUtils.java @@ -0,0 +1,565 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.EnumSet; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; +import java.util.SortedSet; +import java.util.TreeSet; + +import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities; +import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity; +import org.apache.hadoop.yarn.api.records.timeline.TimelineEvent; +import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse; +import org.apache.hadoop.yarn.api.records.timeline.TimelineEvents.EventsOfOneEntity; +import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse.TimelinePutError; +import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TimelineStore; +import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.NameValuePair; +import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TimelineReader.Field; + +public class TimelineStoreTestUtils { + + protected static final Map<String, Object> EMPTY_MAP = + Collections.emptyMap(); + protected static final Map<String, Set<Object>> EMPTY_PRIMARY_FILTERS = + Collections.emptyMap(); + protected static final Map<String, Set<String>> EMPTY_REL_ENTITIES = + Collections.emptyMap(); + + protected TimelineStore store; + protected String entityId1; + protected String entityType1; + protected String entityId1b; + protected String entityId2; + protected String entityType2; + protected Map<String, Set<Object>> primaryFilters; + protected Map<String, Object> secondaryFilters; + protected Map<String, Object> allFilters; + protected Map<String, Object> otherInfo; + protected Map<String, Set<String>> relEntityMap; + protected NameValuePair userFilter; + protected Collection<NameValuePair> goodTestingFilters; + protected Collection<NameValuePair> badTestingFilters; + protected TimelineEvent ev1; + protected TimelineEvent ev2; + protected TimelineEvent ev3; + protected TimelineEvent ev4; + protected Map<String, Object> eventInfo; + protected List<TimelineEvent> events1; + protected List<TimelineEvent> events2; + + /** + * Load test data into the given store + */ + protected void loadTestData() throws IOException { + TimelineEntities entities = new TimelineEntities(); + Map<String, Set<Object>> primaryFilters = + new HashMap<String, Set<Object>>(); + Set<Object> l1 = new HashSet<Object>(); + l1.add("username"); + Set<Object> l2 = new HashSet<Object>(); + l2.add(12345l); + primaryFilters.put("user", l1); + primaryFilters.put("appname", l2); + Map<String, Object> secondaryFilters = new HashMap<String, Object>(); + secondaryFilters.put("startTime", 123456l); + secondaryFilters.put("status", "RUNNING"); + Map<String, Object> otherInfo1 = new HashMap<String, Object>(); + otherInfo1.put("info1", "val1"); + otherInfo1.putAll(secondaryFilters); + + String entityId1 = "id_1"; + String entityType1 = "type_1"; + String entityId1b = "id_2"; + String entityId2 = "id_2"; + String entityType2 = "type_2"; + + Map<String, Set<String>> relatedEntities = + new HashMap<String, Set<String>>(); + relatedEntities.put(entityType2, Collections.singleton(entityId2)); + + TimelineEvent ev3 = createEvent(789l, "launch_event", null); + TimelineEvent ev4 = createEvent(-123l, "init_event", null); + List<TimelineEvent> events = new ArrayList<TimelineEvent>(); + events.add(ev3); + events.add(ev4); + entities.setEntities(Collections.singletonList(createEntity(entityId2, + entityType2, null, events, null, null, null))); + TimelinePutResponse response = store.put(entities); + assertEquals(0, response.getErrors().size()); + + TimelineEvent ev1 = createEvent(123l, "start_event", null); + entities.setEntities(Collections.singletonList(createEntity(entityId1, + entityType1, 123l, Collections.singletonList(ev1), + relatedEntities, primaryFilters, otherInfo1))); + response = store.put(entities); + assertEquals(0, response.getErrors().size()); + entities.setEntities(Collections.singletonList(createEntity(entityId1b, + entityType1, null, Collections.singletonList(ev1), relatedEntities, + primaryFilters, otherInfo1))); + response = store.put(entities); + assertEquals(0, response.getErrors().size()); + + Map<String, Object> eventInfo = new HashMap<String, Object>(); + eventInfo.put("event info 1", "val1"); + TimelineEvent ev2 = createEvent(456l, "end_event", eventInfo); + Map<String, Object> otherInfo2 = new HashMap<String, Object>(); + otherInfo2.put("info2", "val2"); + entities.setEntities(Collections.singletonList(createEntity(entityId1, + entityType1, null, Collections.singletonList(ev2), null, + primaryFilters, otherInfo2))); + response = store.put(entities); + assertEquals(0, response.getErrors().size()); + entities.setEntities(Collections.singletonList(createEntity(entityId1b, + entityType1, 789l, Collections.singletonList(ev2), null, + primaryFilters, otherInfo2))); + response = store.put(entities); + assertEquals(0, response.getErrors().size()); + + entities.setEntities(Collections.singletonList(createEntity( + "badentityid", "badentity", null, null, null, null, otherInfo1))); + response = store.put(entities); + assertEquals(1, response.getErrors().size()); + TimelinePutError error = response.getErrors().get(0); + assertEquals("badentityid", error.getEntityId()); + assertEquals("badentity", error.getEntityType()); + assertEquals(TimelinePutError.NO_START_TIME, error.getErrorCode()); + } + + /** + * Load verification data + */ + protected void loadVerificationData() throws Exception { + userFilter = new NameValuePair("user", + "username"); + goodTestingFilters = new ArrayList<NameValuePair>(); + goodTestingFilters.add(new NameValuePair("appname", 12345l)); + goodTestingFilters.add(new NameValuePair("status", "RUNNING")); + badTestingFilters = new ArrayList<NameValuePair>(); + badTestingFilters.add(new NameValuePair("appname", 12345l)); + badTestingFilters.add(new NameValuePair("status", "FINISHED")); + + primaryFilters = new HashMap<String, Set<Object>>(); + Set<Object> l1 = new HashSet<Object>(); + l1.add("username"); + Set<Object> l2 = new HashSet<Object>(); + l2.add(12345l); + primaryFilters.put("user", l1); + primaryFilters.put("appname", l2); + secondaryFilters = new HashMap<String, Object>(); + secondaryFilters.put("startTime", 123456l); + secondaryFilters.put("status", "RUNNING"); + allFilters = new HashMap<String, Object>(); + allFilters.putAll(secondaryFilters); + for (Entry<String, Set<Object>> pf : primaryFilters.entrySet()) { + for (Object o : pf.getValue()) { + allFilters.put(pf.getKey(), o); + } + } + otherInfo = new HashMap<String, Object>(); + otherInfo.put("info1", "val1"); + otherInfo.put("info2", "val2"); + otherInfo.putAll(secondaryFilters); + + entityId1 = "id_1"; + entityType1 = "type_1"; + entityId1b = "id_2"; + entityId2 = "id_2"; + entityType2 = "type_2"; + + ev1 = createEvent(123l, "start_event", null); + + eventInfo = new HashMap<String, Object>(); + eventInfo.put("event info 1", "val1"); + ev2 = createEvent(456l, "end_event", eventInfo); + events1 = new ArrayList<TimelineEvent>(); + events1.add(ev2); + events1.add(ev1); + + relEntityMap = + new HashMap<String, Set<String>>(); + Set<String> ids = new HashSet<String>(); + ids.add(entityId1); + ids.add(entityId1b); + relEntityMap.put(entityType1, ids); + + ev3 = createEvent(789l, "launch_event", null); + ev4 = createEvent(-123l, "init_event", null); + events2 = new ArrayList<TimelineEvent>(); + events2.add(ev3); + events2.add(ev4); + } +
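[Editorial aside, not part of the patch: the two fixtures above set up a simple put/get round-trip against a TimelineStore. A minimal standalone sketch of that round-trip, using only classes and calls introduced by this patch (the PutGetRoundTrip wrapper class and its main method are illustrative, not from the patch):

import java.util.Collections;
import java.util.EnumSet;

import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEvent;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.MemoryTimelineStore;
import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TimelineReader.Field;
import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TimelineStore;

public class PutGetRoundTrip {
  public static void main(String[] args) throws Exception {
    TimelineStore store = new MemoryTimelineStore();
    store.init(new YarnConfiguration());
    store.start();
    try {
      // An entity needs a start time, either set explicitly or derived from
      // an event; otherwise put() reports NO_START_TIME, as exercised by the
      // "badentityid" case in loadTestData above.
      TimelineEvent event = new TimelineEvent();
      event.setTimestamp(123L);
      event.setEventType("start_event");
      TimelineEntity entity = new TimelineEntity();
      entity.setEntityId("id_1");
      entity.setEntityType("type_1");
      entity.setEvents(Collections.singletonList(event));
      TimelineEntities entities = new TimelineEntities();
      entities.addEntity(entity);
      // put() reports per-entity errors rather than throwing.
      System.out.println("put errors: " + store.put(entities).getErrors());
      // Read the entity back with every field populated.
      TimelineEntity retrieved =
          store.getEntity("id_1", "type_1", EnumSet.allOf(Field.class));
      System.out.println("retrieved: " + retrieved.getEntityId());
    } finally {
      store.stop();
    }
  }
}

End of aside.]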
+ public void testGetSingleEntity() throws IOException { + // test getting entity info + verifyEntityInfo(null, null, null, null, null, null, + store.getEntity("id_1", "type_2", EnumSet.allOf(Field.class))); + + verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, store.getEntity(entityId1, entityType1, + EnumSet.allOf(Field.class))); + + verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, store.getEntity(entityId1b, entityType1, + EnumSet.allOf(Field.class))); + + verifyEntityInfo(entityId2, entityType2, events2, relEntityMap, + EMPTY_PRIMARY_FILTERS, EMPTY_MAP, store.getEntity(entityId2, entityType2, + EnumSet.allOf(Field.class))); + + // test getting single fields + verifyEntityInfo(entityId1, entityType1, events1, null, null, null, + store.getEntity(entityId1, entityType1, EnumSet.of(Field.EVENTS))); + + verifyEntityInfo(entityId1, entityType1, Collections.singletonList(ev2), + null, null, null, store.getEntity(entityId1, entityType1, + EnumSet.of(Field.LAST_EVENT_ONLY))); + + verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, store.getEntity(entityId1b, entityType1, + null)); + + verifyEntityInfo(entityId1, entityType1, null, null, primaryFilters, null, + store.getEntity(entityId1, entityType1, + EnumSet.of(Field.PRIMARY_FILTERS))); + + verifyEntityInfo(entityId1, entityType1, null, null, null, otherInfo, + store.getEntity(entityId1, entityType1, EnumSet.of(Field.OTHER_INFO))); + + verifyEntityInfo(entityId2, entityType2, null, relEntityMap, null, null, + store.getEntity(entityId2, entityType2, + EnumSet.of(Field.RELATED_ENTITIES))); + } + + public void testGetEntities() throws IOException { + // test getting entities + assertEquals("nonzero entities size for nonexistent type", 0, + store.getEntities("type_0", null, null, null, null, null, + null).getEntities().size()); + assertEquals("nonzero entities size for nonexistent type", 0, + store.getEntities("type_3", null, null, null, null, null, + null).getEntities().size()); + assertEquals("nonzero entities size for nonexistent type", 0, + store.getEntities("type_0", null, null, null, userFilter, + null, null).getEntities().size()); + assertEquals("nonzero entities size for nonexistent type", 0, + store.getEntities("type_3", null, null, null, userFilter, + null, null).getEntities().size()); + + List<TimelineEntity> entities = + store.getEntities("type_1", null, null, null, null, null, + EnumSet.allOf(Field.class)).getEntities(); + assertEquals(2, entities.size()); + verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, entities.get(0)); + verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, entities.get(1)); + + entities = store.getEntities("type_2", null, null, null, null, null, + EnumSet.allOf(Field.class)).getEntities(); + assertEquals(1, entities.size()); + verifyEntityInfo(entityId2, entityType2, events2, relEntityMap, + EMPTY_PRIMARY_FILTERS, EMPTY_MAP, entities.get(0)); + + entities = store.getEntities("type_1", 1l, null, null, null, null, + EnumSet.allOf(Field.class)).getEntities(); + assertEquals(1, entities.size()); + verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, entities.get(0)); + + entities = store.getEntities("type_1", 1l, 0l, null, null, null, + EnumSet.allOf(Field.class)).getEntities(); + assertEquals(1, entities.size()); + verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, entities.get(0)); + + entities = store.getEntities("type_1", null, 234l, null, null, null, + EnumSet.allOf(Field.class)).getEntities(); + assertEquals(0, entities.size()); + + entities = store.getEntities("type_1", null, 123l, null, null, null, + EnumSet.allOf(Field.class)).getEntities(); + assertEquals(0, entities.size()); + + entities = store.getEntities("type_1", null, 234l, 345l, null, null, + EnumSet.allOf(Field.class)).getEntities(); + assertEquals(0, entities.size()); + + entities = store.getEntities("type_1", null, null, 345l, null, null, + EnumSet.allOf(Field.class)).getEntities(); + assertEquals(2, entities.size()); + verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, entities.get(0)); + verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, entities.get(1)); + + entities = store.getEntities("type_1", null, null, 123l, null, null, + EnumSet.allOf(Field.class)).getEntities(); + assertEquals(2, entities.size()); + verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, entities.get(0)); + verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, entities.get(1)); + } + + public void testGetEntitiesWithPrimaryFilters() throws IOException { + // test using primary filter + assertEquals("nonzero entities size for primary filter", 0, + store.getEntities("type_1", null, null, null, + new NameValuePair("none", "none"), null, + EnumSet.allOf(Field.class)).getEntities().size()); + assertEquals("nonzero entities size for primary filter", 0, + store.getEntities("type_2", null, null, null, + new NameValuePair("none", "none"), null, + EnumSet.allOf(Field.class)).getEntities().size()); + assertEquals("nonzero entities size for primary filter", 0, + store.getEntities("type_3", null, null, null, + new NameValuePair("none", "none"), null, + EnumSet.allOf(Field.class)).getEntities().size()); + + List<TimelineEntity> entities = store.getEntities("type_1", null, null, null, + userFilter, null, EnumSet.allOf(Field.class)).getEntities(); + assertEquals(2, entities.size()); + verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, entities.get(0)); + verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, entities.get(1)); + + entities = store.getEntities("type_2", null, null, null, userFilter, null, + EnumSet.allOf(Field.class)).getEntities(); + assertEquals(0, entities.size()); + + entities = store.getEntities("type_1", 1l, null, null, userFilter, null, + EnumSet.allOf(Field.class)).getEntities(); + assertEquals(1, entities.size()); + verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, entities.get(0)); + + entities = store.getEntities("type_1", 1l, 0l, null, userFilter, null, + EnumSet.allOf(Field.class)).getEntities(); + assertEquals(1, entities.size()); + verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, entities.get(0)); + + entities = store.getEntities("type_1", null, 234l, null, userFilter, null, + EnumSet.allOf(Field.class)).getEntities(); + assertEquals(0, entities.size()); + + entities = store.getEntities("type_1", null, 234l, 345l, userFilter, null, + EnumSet.allOf(Field.class)).getEntities(); + assertEquals(0, entities.size()); + + entities = store.getEntities("type_1", null, null, 345l, userFilter, null, + EnumSet.allOf(Field.class)).getEntities(); + assertEquals(2, entities.size()); + verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, entities.get(0)); + verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, entities.get(1)); + } + + public void testGetEntitiesWithSecondaryFilters() throws IOException { + // test using secondary filter + List<TimelineEntity> entities = store.getEntities("type_1", null, null, null, + null, goodTestingFilters, EnumSet.allOf(Field.class)).getEntities(); + assertEquals(2, entities.size()); + verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, entities.get(0)); + verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, entities.get(1)); + + entities = store.getEntities("type_1", null, null, null, userFilter, + goodTestingFilters, EnumSet.allOf(Field.class)).getEntities(); + assertEquals(2, entities.size()); + verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, entities.get(0)); + verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, entities.get(1)); + + entities = store.getEntities("type_1", null, null, null, null, + badTestingFilters, EnumSet.allOf(Field.class)).getEntities(); + assertEquals(0, entities.size()); + + entities = store.getEntities("type_1", null, null, null, userFilter, + badTestingFilters, EnumSet.allOf(Field.class)).getEntities(); + assertEquals(0, entities.size()); + } + + public void testGetEvents() throws IOException { + // test getting entity timelines + SortedSet<String> sortedSet = new TreeSet<String>(); + sortedSet.add(entityId1); + List<EventsOfOneEntity> timelines = + store.getEntityTimelines(entityType1, sortedSet, null, null, + null, null).getAllEvents(); + assertEquals(1, timelines.size()); + verifyEntityTimeline(timelines.get(0), entityId1, entityType1, ev2, ev1); + + sortedSet.add(entityId1b); + timelines = store.getEntityTimelines(entityType1, sortedSet, null, + null, null, null).getAllEvents(); + assertEquals(2, timelines.size()); + verifyEntityTimeline(timelines.get(0), entityId1, entityType1, ev2, ev1); + verifyEntityTimeline(timelines.get(1), entityId1b, entityType1, ev2, ev1); + + timelines = store.getEntityTimelines(entityType1, sortedSet, 1l, + null, null, null).getAllEvents(); + assertEquals(2, timelines.size()); + verifyEntityTimeline(timelines.get(0), entityId1, entityType1, ev2); + verifyEntityTimeline(timelines.get(1), entityId1b, entityType1, ev2); + + timelines = store.getEntityTimelines(entityType1, sortedSet, null, + 345l, null, null).getAllEvents(); + assertEquals(2, timelines.size()); + verifyEntityTimeline(timelines.get(0), entityId1, entityType1, ev2); + verifyEntityTimeline(timelines.get(1), entityId1b, entityType1, ev2); + + timelines = store.getEntityTimelines(entityType1, sortedSet, null, + 123l, null, null).getAllEvents(); + assertEquals(2, timelines.size()); + verifyEntityTimeline(timelines.get(0), entityId1, entityType1, ev2); + verifyEntityTimeline(timelines.get(1), entityId1b, entityType1, ev2); + + timelines = store.getEntityTimelines(entityType1, sortedSet, null, + null, 345l, null).getAllEvents(); + assertEquals(2, timelines.size()); + verifyEntityTimeline(timelines.get(0), entityId1, entityType1, ev1); + verifyEntityTimeline(timelines.get(1), entityId1b, entityType1, ev1); + + timelines = store.getEntityTimelines(entityType1, sortedSet, null, + null, 123l, null).getAllEvents(); + assertEquals(2, timelines.size()); + verifyEntityTimeline(timelines.get(0), entityId1, entityType1, ev1); + verifyEntityTimeline(timelines.get(1), entityId1b, entityType1, ev1); + + timelines = store.getEntityTimelines(entityType1, sortedSet, null, + null, null, Collections.singleton("end_event")).getAllEvents(); + assertEquals(2, timelines.size()); + verifyEntityTimeline(timelines.get(0), entityId1, entityType1, ev2); + verifyEntityTimeline(timelines.get(1), entityId1b, entityType1, ev2); + + sortedSet.add(entityId2); + timelines = store.getEntityTimelines(entityType2, sortedSet, null, + null, null, null).getAllEvents(); + assertEquals(1, timelines.size()); + verifyEntityTimeline(timelines.get(0), entityId2, entityType2, ev3, ev4); + } + + /** + * Verify a single entity + */ + protected static void verifyEntityInfo(String entityId, String entityType, + List<TimelineEvent> events, Map<String, Set<String>> relatedEntities, + Map<String, Set<Object>> primaryFilters, Map<String, Object> otherInfo, + TimelineEntity retrievedEntityInfo) { + if (entityId == null) { + assertNull(retrievedEntityInfo); + return; + } + assertEquals(entityId, retrievedEntityInfo.getEntityId()); + assertEquals(entityType, retrievedEntityInfo.getEntityType()); + if (events == null) { + assertNull(retrievedEntityInfo.getEvents()); + } else { + assertEquals(events, retrievedEntityInfo.getEvents()); + } + if (relatedEntities == null) { + assertNull(retrievedEntityInfo.getRelatedEntities()); + } else { + assertEquals(relatedEntities, retrievedEntityInfo.getRelatedEntities()); + } + if (primaryFilters == null) { + assertNull(retrievedEntityInfo.getPrimaryFilters()); + } else { + assertTrue(primaryFilters.equals( + retrievedEntityInfo.getPrimaryFilters())); + } + if (otherInfo == null) { + assertNull(retrievedEntityInfo.getOtherInfo()); + } else { + assertTrue(otherInfo.equals(retrievedEntityInfo.getOtherInfo())); + } + } + + /** + * Verify timeline events + */ + private static void verifyEntityTimeline( + EventsOfOneEntity retrievedEvents, String entityId, String entityType, + TimelineEvent... actualEvents) { + assertEquals(entityId, retrievedEvents.getEntityId()); + assertEquals(entityType, retrievedEvents.getEntityType()); + assertEquals(actualEvents.length, retrievedEvents.getEvents().size()); + for (int i = 0; i < actualEvents.length; i++) { + assertEquals(actualEvents[i], retrievedEvents.getEvents().get(i)); + } + } + + /** + * Create a test entity + */ + protected static TimelineEntity createEntity(String entityId, String entityType, + Long startTime, List<TimelineEvent> events, + Map<String, Set<String>> relatedEntities, + Map<String, Set<Object>> primaryFilters, + Map<String, Object> otherInfo) { + TimelineEntity entity = new TimelineEntity(); + entity.setEntityId(entityId); + entity.setEntityType(entityType); + entity.setStartTime(startTime); + entity.setEvents(events); + if (relatedEntities != null) { + for (Entry<String, Set<String>> e : relatedEntities.entrySet()) { + for (String v : e.getValue()) { + entity.addRelatedEntity(e.getKey(), v); + } + } + } else { + entity.setRelatedEntities(null); + } + entity.setPrimaryFilters(primaryFilters); + entity.setOtherInfo(otherInfo); + return entity; + } + + /** + * Create a test event + */ + private static TimelineEvent createEvent(long timestamp, String type, Map<String, Object> info) { + TimelineEvent event = new TimelineEvent(); + event.setTimestamp(timestamp); + event.setEventType(type); + event.setEventInfo(info); + return event; + } + +} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestATSWebServices.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestATSWebServices.java deleted file mode 100644 index 58a826c..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestATSWebServices.java +++ /dev/null @@ -1,249 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License.
- */ - -package org.apache.hadoop.yarn.server.applicationhistoryservice.webapp; - -import static org.junit.Assert.assertEquals; - -import javax.ws.rs.core.MediaType; - -import junit.framework.Assert; - -import org.apache.hadoop.yarn.api.records.apptimeline.ATSEntities; -import org.apache.hadoop.yarn.api.records.apptimeline.ATSEntity; -import org.apache.hadoop.yarn.api.records.apptimeline.ATSEvent; -import org.apache.hadoop.yarn.api.records.apptimeline.ATSEvents; -import org.apache.hadoop.yarn.api.records.apptimeline.ATSPutErrors; -import org.apache.hadoop.yarn.server.applicationhistoryservice.apptimeline.ApplicationTimelineStore; -import org.apache.hadoop.yarn.server.applicationhistoryservice.apptimeline.TestMemoryApplicationTimelineStore; -import org.apache.hadoop.yarn.webapp.GenericExceptionHandler; -import org.apache.hadoop.yarn.webapp.YarnJacksonJaxbJsonProvider; -import org.junit.Test; - -import com.google.inject.Guice; -import com.google.inject.Injector; -import com.google.inject.servlet.GuiceServletContextListener; -import com.google.inject.servlet.ServletModule; -import com.sun.jersey.api.client.ClientResponse; -import com.sun.jersey.api.client.WebResource; -import com.sun.jersey.api.client.config.DefaultClientConfig; -import com.sun.jersey.guice.spi.container.servlet.GuiceContainer; -import com.sun.jersey.test.framework.JerseyTest; -import com.sun.jersey.test.framework.WebAppDescriptor; - - -public class TestATSWebServices extends JerseyTest { - - private static ApplicationTimelineStore store; - - private Injector injector = Guice.createInjector(new ServletModule() { - - @Override - protected void configureServlets() { - bind(YarnJacksonJaxbJsonProvider.class); - bind(ATSWebServices.class); - bind(GenericExceptionHandler.class); - try{ - store = mockApplicationTimelineStore(); - } catch (Exception e) { - Assert.fail(); - } - bind(ApplicationTimelineStore.class).toInstance(store); - serve("/*").with(GuiceContainer.class); - } - - }); - - public class GuiceServletConfig extends GuiceServletContextListener { - - @Override - protected Injector getInjector() { - return injector; - } - } - - private ApplicationTimelineStore mockApplicationTimelineStore() - throws Exception { - TestMemoryApplicationTimelineStore store = - new TestMemoryApplicationTimelineStore(); - store.setup(); - return store.getApplicationTimelineStore(); - } - - public TestATSWebServices() { - super(new WebAppDescriptor.Builder( - "org.apache.hadoop.yarn.server.applicationhistoryservice.webapp") - .contextListenerClass(GuiceServletConfig.class) - .filterClass(com.google.inject.servlet.GuiceFilter.class) - .contextPath("jersey-guice-filter") - .servletPath("/") - .clientConfig(new DefaultClientConfig(YarnJacksonJaxbJsonProvider.class)) - .build()); - } - - @Test - public void testAbout() throws Exception { - WebResource r = resource(); - ClientResponse response = r.path("ws").path("v1").path("apptimeline") - .accept(MediaType.APPLICATION_JSON) - .get(ClientResponse.class); - assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); - ATSWebServices.AboutInfo about = - response.getEntity(ATSWebServices.AboutInfo.class); - Assert.assertNotNull(about); - Assert.assertEquals("Application Timeline API", about.getAbout()); - } - - @Test - public void testGetEntities() throws Exception { - WebResource r = resource(); - ClientResponse response = r.path("ws").path("v1").path("apptimeline") - .path("type_1") - .accept(MediaType.APPLICATION_JSON) - .get(ClientResponse.class); - 
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); - ATSEntities entities = response.getEntity(ATSEntities.class); - Assert.assertNotNull(entities); - Assert.assertEquals(2, entities.getEntities().size()); - ATSEntity entity1 = entities.getEntities().get(0); - Assert.assertNotNull(entity1); - Assert.assertEquals("id_1", entity1.getEntityId()); - Assert.assertEquals("type_1", entity1.getEntityType()); - Assert.assertEquals(123l, entity1.getStartTime().longValue()); - Assert.assertEquals(2, entity1.getEvents().size()); - Assert.assertEquals(2, entity1.getPrimaryFilters().size()); - Assert.assertEquals(4, entity1.getOtherInfo().size()); - ATSEntity entity2 = entities.getEntities().get(1); - Assert.assertNotNull(entity2); - Assert.assertEquals("id_2", entity2.getEntityId()); - Assert.assertEquals("type_1", entity2.getEntityType()); - Assert.assertEquals(123l, entity2.getStartTime().longValue()); - Assert.assertEquals(2, entity2.getEvents().size()); - Assert.assertEquals(2, entity2.getPrimaryFilters().size()); - Assert.assertEquals(4, entity2.getOtherInfo().size()); - } - - @Test - public void testGetEntity() throws Exception { - WebResource r = resource(); - ClientResponse response = r.path("ws").path("v1").path("apptimeline") - .path("type_1").path("id_1") - .accept(MediaType.APPLICATION_JSON) - .get(ClientResponse.class); - assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); - ATSEntity entity = response.getEntity(ATSEntity.class); - Assert.assertNotNull(entity); - Assert.assertEquals("id_1", entity.getEntityId()); - Assert.assertEquals("type_1", entity.getEntityType()); - Assert.assertEquals(123l, entity.getStartTime().longValue()); - Assert.assertEquals(2, entity.getEvents().size()); - Assert.assertEquals(2, entity.getPrimaryFilters().size()); - Assert.assertEquals(4, entity.getOtherInfo().size()); - } - - @Test - public void testGetEntityFields1() throws Exception { - WebResource r = resource(); - ClientResponse response = r.path("ws").path("v1").path("apptimeline") - .path("type_1").path("id_1").queryParam("fields", "events,otherinfo") - .accept(MediaType.APPLICATION_JSON) - .get(ClientResponse.class); - assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); - ATSEntity entity = response.getEntity(ATSEntity.class); - Assert.assertNotNull(entity); - Assert.assertEquals("id_1", entity.getEntityId()); - Assert.assertEquals("type_1", entity.getEntityType()); - Assert.assertEquals(123l, entity.getStartTime().longValue()); - Assert.assertEquals(2, entity.getEvents().size()); - Assert.assertEquals(0, entity.getPrimaryFilters().size()); - Assert.assertEquals(4, entity.getOtherInfo().size()); - } - - @Test - public void testGetEntityFields2() throws Exception { - WebResource r = resource(); - ClientResponse response = r.path("ws").path("v1").path("apptimeline") - .path("type_1").path("id_1").queryParam("fields", "lasteventonly," + - "primaryfilters,relatedentities") - .accept(MediaType.APPLICATION_JSON) - .get(ClientResponse.class); - assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); - ATSEntity entity = response.getEntity(ATSEntity.class); - Assert.assertNotNull(entity); - Assert.assertEquals("id_1", entity.getEntityId()); - Assert.assertEquals("type_1", entity.getEntityType()); - Assert.assertEquals(123l, entity.getStartTime().longValue()); - Assert.assertEquals(1, entity.getEvents().size()); - Assert.assertEquals(2, entity.getPrimaryFilters().size()); - Assert.assertEquals(0, entity.getOtherInfo().size()); - } - - @Test - public void 
testGetEvents() throws Exception { - WebResource r = resource(); - ClientResponse response = r.path("ws").path("v1").path("apptimeline") - .path("type_1").path("events") - .queryParam("entityId", "id_1") - .accept(MediaType.APPLICATION_JSON) - .get(ClientResponse.class); - assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); - ATSEvents events = response.getEntity(ATSEvents.class); - Assert.assertNotNull(events); - Assert.assertEquals(1, events.getAllEvents().size()); - ATSEvents.ATSEventsOfOneEntity partEvents = events.getAllEvents().get(0); - Assert.assertEquals(2, partEvents.getEvents().size()); - ATSEvent event1 = partEvents.getEvents().get(0); - Assert.assertEquals(456l, event1.getTimestamp()); - Assert.assertEquals("end_event", event1.getEventType()); - Assert.assertEquals(1, event1.getEventInfo().size()); - ATSEvent event2 = partEvents.getEvents().get(1); - Assert.assertEquals(123l, event2.getTimestamp()); - Assert.assertEquals("start_event", event2.getEventType()); - Assert.assertEquals(0, event2.getEventInfo().size()); - } - - @Test - public void testPostEntities() throws Exception { - ATSEntities entities = new ATSEntities(); - ATSEntity entity = new ATSEntity(); - entity.setEntityId("test id"); - entity.setEntityType("test type"); - entity.setStartTime(System.currentTimeMillis()); - entities.addEntity(entity); - WebResource r = resource(); - ClientResponse response = r.path("ws").path("v1").path("apptimeline") - .accept(MediaType.APPLICATION_JSON) - .type(MediaType.APPLICATION_JSON) - .post(ClientResponse.class, entities); - assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); - ATSPutErrors errors = response.getEntity(ATSPutErrors.class); - Assert.assertNotNull(errors); - Assert.assertEquals(0, errors.getErrors().size()); - // verify the entity exists in the store - response = r.path("ws").path("v1").path("apptimeline") - .path("test type").path("test id") - .accept(MediaType.APPLICATION_JSON) - .get(ClientResponse.class); - assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); - entity = response.getEntity(ATSEntity.class); - Assert.assertNotNull(entity); - Assert.assertEquals("test id", entity.getEntityId()); - Assert.assertEquals("test type", entity.getEntityType()); - } - -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestTimelineWebServices.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestTimelineWebServices.java new file mode 100644 index 0000000..fd93ba1 --- /dev/null +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestTimelineWebServices.java @@ -0,0 +1,249 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.yarn.server.applicationhistoryservice.webapp; + +import static org.junit.Assert.assertEquals; + +import javax.ws.rs.core.MediaType; + +import junit.framework.Assert; + +import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities; +import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity; +import org.apache.hadoop.yarn.api.records.timeline.TimelineEvent; +import org.apache.hadoop.yarn.api.records.timeline.TimelineEvents; +import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse; +import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TimelineStore; +import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TestMemoryTimelineStore; +import org.apache.hadoop.yarn.webapp.GenericExceptionHandler; +import org.apache.hadoop.yarn.webapp.YarnJacksonJaxbJsonProvider; +import org.junit.Test; + +import com.google.inject.Guice; +import com.google.inject.Injector; +import com.google.inject.servlet.GuiceServletContextListener; +import com.google.inject.servlet.ServletModule; +import com.sun.jersey.api.client.ClientResponse; +import com.sun.jersey.api.client.WebResource; +import com.sun.jersey.api.client.config.DefaultClientConfig; +import com.sun.jersey.guice.spi.container.servlet.GuiceContainer; +import com.sun.jersey.test.framework.JerseyTest; +import com.sun.jersey.test.framework.WebAppDescriptor; + + +public class TestTimelineWebServices extends JerseyTest { + + private static TimelineStore store; + + private Injector injector = Guice.createInjector(new ServletModule() { + + @Override + protected void configureServlets() { + bind(YarnJacksonJaxbJsonProvider.class); + bind(TimelineWebServices.class); + bind(GenericExceptionHandler.class); + try{ + store = mockTimelineStore(); + } catch (Exception e) { + Assert.fail(); + } + bind(TimelineStore.class).toInstance(store); + serve("/*").with(GuiceContainer.class); + } + + }); + + public class GuiceServletConfig extends GuiceServletContextListener { + + @Override + protected Injector getInjector() { + return injector; + } + } + + private TimelineStore mockTimelineStore() + throws Exception { + TestMemoryTimelineStore store = + new TestMemoryTimelineStore(); + store.setup(); + return store.getTimelineStore(); + } + + public TestTimelineWebServices() { + super(new WebAppDescriptor.Builder( + "org.apache.hadoop.yarn.server.applicationhistoryservice.webapp") + .contextListenerClass(GuiceServletConfig.class) + .filterClass(com.google.inject.servlet.GuiceFilter.class) + .contextPath("jersey-guice-filter") + .servletPath("/") + .clientConfig(new DefaultClientConfig(YarnJacksonJaxbJsonProvider.class)) + .build()); + } + + @Test + public void testAbout() throws Exception { + WebResource r = resource(); + ClientResponse response = r.path("ws").path("v1").path("timeline") + .accept(MediaType.APPLICATION_JSON) + .get(ClientResponse.class); + assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); + TimelineWebServices.AboutInfo about = + response.getEntity(TimelineWebServices.AboutInfo.class); + Assert.assertNotNull(about); + 
Assert.assertEquals("Timeline API", about.getAbout()); + } + + @Test + public void testGetEntities() throws Exception { + WebResource r = resource(); + ClientResponse response = r.path("ws").path("v1").path("timeline") + .path("type_1") + .accept(MediaType.APPLICATION_JSON) + .get(ClientResponse.class); + assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); + TimelineEntities entities = response.getEntity(TimelineEntities.class); + Assert.assertNotNull(entities); + Assert.assertEquals(2, entities.getEntities().size()); + TimelineEntity entity1 = entities.getEntities().get(0); + Assert.assertNotNull(entity1); + Assert.assertEquals("id_1", entity1.getEntityId()); + Assert.assertEquals("type_1", entity1.getEntityType()); + Assert.assertEquals(123l, entity1.getStartTime().longValue()); + Assert.assertEquals(2, entity1.getEvents().size()); + Assert.assertEquals(2, entity1.getPrimaryFilters().size()); + Assert.assertEquals(4, entity1.getOtherInfo().size()); + TimelineEntity entity2 = entities.getEntities().get(1); + Assert.assertNotNull(entity2); + Assert.assertEquals("id_2", entity2.getEntityId()); + Assert.assertEquals("type_1", entity2.getEntityType()); + Assert.assertEquals(123l, entity2.getStartTime().longValue()); + Assert.assertEquals(2, entity2.getEvents().size()); + Assert.assertEquals(2, entity2.getPrimaryFilters().size()); + Assert.assertEquals(4, entity2.getOtherInfo().size()); + } + + @Test + public void testGetEntity() throws Exception { + WebResource r = resource(); + ClientResponse response = r.path("ws").path("v1").path("timeline") + .path("type_1").path("id_1") + .accept(MediaType.APPLICATION_JSON) + .get(ClientResponse.class); + assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); + TimelineEntity entity = response.getEntity(TimelineEntity.class); + Assert.assertNotNull(entity); + Assert.assertEquals("id_1", entity.getEntityId()); + Assert.assertEquals("type_1", entity.getEntityType()); + Assert.assertEquals(123l, entity.getStartTime().longValue()); + Assert.assertEquals(2, entity.getEvents().size()); + Assert.assertEquals(2, entity.getPrimaryFilters().size()); + Assert.assertEquals(4, entity.getOtherInfo().size()); + } + + @Test + public void testGetEntityFields1() throws Exception { + WebResource r = resource(); + ClientResponse response = r.path("ws").path("v1").path("timeline") + .path("type_1").path("id_1").queryParam("fields", "events,otherinfo") + .accept(MediaType.APPLICATION_JSON) + .get(ClientResponse.class); + assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); + TimelineEntity entity = response.getEntity(TimelineEntity.class); + Assert.assertNotNull(entity); + Assert.assertEquals("id_1", entity.getEntityId()); + Assert.assertEquals("type_1", entity.getEntityType()); + Assert.assertEquals(123l, entity.getStartTime().longValue()); + Assert.assertEquals(2, entity.getEvents().size()); + Assert.assertEquals(0, entity.getPrimaryFilters().size()); + Assert.assertEquals(4, entity.getOtherInfo().size()); + } + + @Test + public void testGetEntityFields2() throws Exception { + WebResource r = resource(); + ClientResponse response = r.path("ws").path("v1").path("timeline") + .path("type_1").path("id_1").queryParam("fields", "lasteventonly," + + "primaryfilters,relatedentities") + .accept(MediaType.APPLICATION_JSON) + .get(ClientResponse.class); + assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); + TimelineEntity entity = response.getEntity(TimelineEntity.class); + Assert.assertNotNull(entity); + 
Assert.assertEquals("id_1", entity.getEntityId()); + Assert.assertEquals("type_1", entity.getEntityType()); + Assert.assertEquals(123l, entity.getStartTime().longValue()); + Assert.assertEquals(1, entity.getEvents().size()); + Assert.assertEquals(2, entity.getPrimaryFilters().size()); + Assert.assertEquals(0, entity.getOtherInfo().size()); + } + + @Test + public void testGetEvents() throws Exception { + WebResource r = resource(); + ClientResponse response = r.path("ws").path("v1").path("timeline") + .path("type_1").path("events") + .queryParam("entityId", "id_1") + .accept(MediaType.APPLICATION_JSON) + .get(ClientResponse.class); + assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); + TimelineEvents events = response.getEntity(TimelineEvents.class); + Assert.assertNotNull(events); + Assert.assertEquals(1, events.getAllEvents().size()); + TimelineEvents.EventsOfOneEntity partEvents = events.getAllEvents().get(0); + Assert.assertEquals(2, partEvents.getEvents().size()); + TimelineEvent event1 = partEvents.getEvents().get(0); + Assert.assertEquals(456l, event1.getTimestamp()); + Assert.assertEquals("end_event", event1.getEventType()); + Assert.assertEquals(1, event1.getEventInfo().size()); + TimelineEvent event2 = partEvents.getEvents().get(1); + Assert.assertEquals(123l, event2.getTimestamp()); + Assert.assertEquals("start_event", event2.getEventType()); + Assert.assertEquals(0, event2.getEventInfo().size()); + } + + @Test + public void testPostEntities() throws Exception { + TimelineEntities entities = new TimelineEntities(); + TimelineEntity entity = new TimelineEntity(); + entity.setEntityId("test id"); + entity.setEntityType("test type"); + entity.setStartTime(System.currentTimeMillis()); + entities.addEntity(entity); + WebResource r = resource(); + ClientResponse response = r.path("ws").path("v1").path("timeline") + .accept(MediaType.APPLICATION_JSON) + .type(MediaType.APPLICATION_JSON) + .post(ClientResponse.class, entities); + assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); + TimelinePutResponse putResposne = response.getEntity(TimelinePutResponse.class); + Assert.assertNotNull(putResposne); + Assert.assertEquals(0, putResposne.getErrors().size()); + // verify the entity exists in the store + response = r.path("ws").path("v1").path("timeline") + .path("test type").path("test id") + .accept(MediaType.APPLICATION_JSON) + .get(ClientResponse.class); + assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); + entity = response.getEntity(TimelineEntity.class); + Assert.assertNotNull(entity); + Assert.assertEquals("test id", entity.getEntityId()); + Assert.assertEquals("test type", entity.getEntityType()); + } + +}