commit ff60601e23b80b4a518572a0f93241351b48f163
Author: Vinod Kumar Vavilapalli
Date:   Tue May 27 11:49:32 2014 -0700

    Timeline server package rename

diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
index a528095..08d1336 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
@@ -1143,7 +1143,7 @@
     Store class name for timeline store.
     yarn.timeline-service.store-class
-    org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.LeveldbTimelineStore
+    org.apache.hadoop.yarn.server.timeline.LeveldbTimelineStore
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java
index 5e1277f..dfd8c29 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java
@@ -38,12 +38,12 @@
 import org.apache.hadoop.yarn.YarnUncaughtExceptionHandler;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.LeveldbTimelineStore;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TimelineStore;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.security.TimelineACLsManager;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.security.TimelineAuthenticationFilterInitializer;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.security.TimelineDelegationTokenSecretManagerService;
 import org.apache.hadoop.yarn.server.applicationhistoryservice.webapp.AHSWebApp;
+import org.apache.hadoop.yarn.server.timeline.LeveldbTimelineStore;
+import org.apache.hadoop.yarn.server.timeline.TimelineStore;
+import org.apache.hadoop.yarn.server.timeline.security.TimelineACLsManager;
+import org.apache.hadoop.yarn.server.timeline.security.TimelineAuthenticationFilterInitializer;
+import org.apache.hadoop.yarn.server.timeline.security.TimelineDelegationTokenSecretManagerService;
 import org.apache.hadoop.yarn.webapp.WebApp;
 import org.apache.hadoop.yarn.webapp.WebApps;
 import org.apache.hadoop.yarn.webapp.util.WebAppUtils;
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/EntityIdentifier.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/EntityIdentifier.java
deleted file mode 100644
index 4b202d8..0000000
--- 
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/EntityIdentifier.java +++ /dev/null @@ -1,100 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline; - -import org.apache.hadoop.classification.InterfaceAudience.Private; -import org.apache.hadoop.classification.InterfaceStability.Unstable; - -/** - * The unique identifier for an entity - */ -@Private -@Unstable -public class EntityIdentifier implements Comparable { - - private String id; - private String type; - - public EntityIdentifier(String id, String type) { - this.id = id; - this.type = type; - } - - /** - * Get the entity Id. - * @return The entity Id. - */ - public String getId() { - return id; - } - - /** - * Get the entity type. - * @return The entity type. - */ - public String getType() { - return type; - } - - @Override - public int compareTo(EntityIdentifier other) { - int c = type.compareTo(other.type); - if (c != 0) return c; - return id.compareTo(other.id); - } - - @Override - public int hashCode() { - // generated by eclipse - final int prime = 31; - int result = 1; - result = prime * result + ((id == null) ? 0 : id.hashCode()); - result = prime * result + ((type == null) ? 
0 : type.hashCode()); - return result; - } - - @Override - public boolean equals(Object obj) { - // generated by eclipse - if (this == obj) - return true; - if (obj == null) - return false; - if (getClass() != obj.getClass()) - return false; - EntityIdentifier other = (EntityIdentifier) obj; - if (id == null) { - if (other.id != null) - return false; - } else if (!id.equals(other.id)) - return false; - if (type == null) { - if (other.type != null) - return false; - } else if (!type.equals(other.type)) - return false; - return true; - } - - @Override - public String toString() { - return "{ id: " + id + ", type: "+ type + " }"; - } - -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/GenericObjectMapper.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/GenericObjectMapper.java deleted file mode 100644 index b1846a3..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/GenericObjectMapper.java +++ /dev/null @@ -1,135 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline; - -import java.io.IOException; - -import org.apache.hadoop.classification.InterfaceAudience; -import org.apache.hadoop.classification.InterfaceStability; -import org.codehaus.jackson.map.ObjectMapper; -import org.codehaus.jackson.map.ObjectReader; -import org.codehaus.jackson.map.ObjectWriter; - -/** - * A utility class providing methods for serializing and deserializing - * objects. The {@link #write(Object)} and {@link #read(byte[])} methods are - * used by the {@link LeveldbTimelineStore} to store and retrieve arbitrary - * JSON, while the {@link #writeReverseOrderedLong} and {@link - * #readReverseOrderedLong} methods are used to sort entities in descending - * start time order. - */ -@InterfaceAudience.Private -@InterfaceStability.Unstable -public class GenericObjectMapper { - private static final byte[] EMPTY_BYTES = new byte[0]; - - public static final ObjectReader OBJECT_READER; - public static final ObjectWriter OBJECT_WRITER; - - static { - ObjectMapper mapper = new ObjectMapper(); - OBJECT_READER = mapper.reader(Object.class); - OBJECT_WRITER = mapper.writer(); - } - - /** - * Serializes an Object into a byte array. 
Along with {@link #read(byte[])}, - * can be used to serialize an Object and deserialize it into an Object of - * the same type without needing to specify the Object's type, - * as long as it is one of the JSON-compatible objects understood by - * ObjectMapper. - * - * @param o An Object - * @return A byte array representation of the Object - * @throws IOException if there is a write error - */ - public static byte[] write(Object o) throws IOException { - if (o == null) { - return EMPTY_BYTES; - } - return OBJECT_WRITER.writeValueAsBytes(o); - } - - /** - * Deserializes an Object from a byte array created with - * {@link #write(Object)}. - * - * @param b A byte array - * @return An Object - * @throws IOException if there is a read error - */ - public static Object read(byte[] b) throws IOException { - return read(b, 0); - } - - /** - * Deserializes an Object from a byte array at a specified offset, assuming - * the bytes were created with {@link #write(Object)}. - * - * @param b A byte array - * @param offset Offset into the array - * @return An Object - * @throws IOException if there is a read error - */ - public static Object read(byte[] b, int offset) throws IOException { - if (b == null || b.length == 0) { - return null; - } - return OBJECT_READER.readValue(b, offset, b.length - offset); - } - - /** - * Converts a long to a 8-byte array so that lexicographic ordering of the - * produced byte arrays sort the longs in descending order. - * - * @param l A long - * @return A byte array - */ - public static byte[] writeReverseOrderedLong(long l) { - byte[] b = new byte[8]; - return writeReverseOrderedLong(l, b, 0); - } - - public static byte[] writeReverseOrderedLong(long l, byte[] b, int offset) { - b[offset] = (byte)(0x7f ^ ((l >> 56) & 0xff)); - for (int i = offset+1; i < offset+7; i++) { - b[i] = (byte)(0xff ^ ((l >> 8*(7-i)) & 0xff)); - } - b[offset+7] = (byte)(0xff ^ (l & 0xff)); - return b; - } - - /** - * Reads 8 bytes from an array starting at the specified offset and - * converts them to a long. The bytes are assumed to have been created - * with {@link #writeReverseOrderedLong}. - * - * @param b A byte array - * @param offset An offset into the byte array - * @return A long - */ - public static long readReverseOrderedLong(byte[] b, int offset) { - long l = b[offset] & 0xff; - for (int i = 1; i < 8; i++) { - l = l << 8; - l = l | (b[offset+i]&0xff); - } - return l ^ 0x7fffffffffffffffl; - } - -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/LeveldbTimelineStore.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/LeveldbTimelineStore.java deleted file mode 100644 index 51bffda..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/LeveldbTimelineStore.java +++ /dev/null @@ -1,1492 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline; - -import static org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.GenericObjectMapper.readReverseOrderedLong; -import static org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.GenericObjectMapper.writeReverseOrderedLong; - -import java.io.ByteArrayOutputStream; -import java.io.File; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.Comparator; -import java.util.EnumSet; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Set; -import java.util.SortedSet; -import java.util.TreeMap; -import java.util.concurrent.locks.ReentrantLock; -import java.util.concurrent.locks.ReentrantReadWriteLock; - -import org.apache.commons.collections.map.LRUMap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.classification.InterfaceAudience; -import org.apache.hadoop.classification.InterfaceAudience.Private; -import org.apache.hadoop.classification.InterfaceStability; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.fs.permission.FsPermission; -import org.apache.hadoop.io.IOUtils; -import org.apache.hadoop.io.WritableComparator; -import org.apache.hadoop.service.AbstractService; -import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities; -import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity; -import org.apache.hadoop.yarn.api.records.timeline.TimelineEvent; -import org.apache.hadoop.yarn.api.records.timeline.TimelineEvents; -import org.apache.hadoop.yarn.api.records.timeline.TimelineEvents.EventsOfOneEntity; -import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse; -import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse.TimelinePutError; -import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.fusesource.leveldbjni.JniDBFactory; -import org.iq80.leveldb.DB; -import org.iq80.leveldb.DBIterator; -import org.iq80.leveldb.Options; -import org.iq80.leveldb.ReadOptions; -import org.iq80.leveldb.WriteBatch; -import org.iq80.leveldb.WriteOptions; - -import com.google.common.annotations.VisibleForTesting; - -/** - *

- * An implementation of an application timeline store backed by leveldb.
- *
- * There are three sections of the db, the start time section,
- * the entity section, and the indexed entity section.
- *
- * The start time section is used to retrieve the unique start time for
- * a given entity. Its values each contain a start time while its keys are of
- * the form:
- *
- *   START_TIME_LOOKUP_PREFIX + entity type + entity id
- *
- * The entity section is ordered by entity type, then entity start time
- * descending, then entity ID. There are four sub-sections of the entity
- * section: events, primary filters, related entities,
- * and other info. The event entries have event info serialized into their
- * values. The other info entries have values corresponding to the values of
- * the other info name/value map for the entry (note the names are contained
- * in the key). All other entries have empty values. The key structure is as
- * follows:
- *
- *   ENTITY_ENTRY_PREFIX + entity type + revstarttime + entity id
- *
- *   ENTITY_ENTRY_PREFIX + entity type + revstarttime + entity id +
- *     EVENTS_COLUMN + reveventtimestamp + eventtype
- *
- *   ENTITY_ENTRY_PREFIX + entity type + revstarttime + entity id +
- *     PRIMARY_FILTERS_COLUMN + name + value
- *
- *   ENTITY_ENTRY_PREFIX + entity type + revstarttime + entity id +
- *     OTHER_INFO_COLUMN + name
- *
- *   ENTITY_ENTRY_PREFIX + entity type + revstarttime + entity id +
- *     RELATED_ENTITIES_COLUMN + relatedentity type + relatedentity id
- *
- *   ENTITY_ENTRY_PREFIX + entity type + revstarttime + entity id +
- *     INVISIBLE_REVERSE_RELATED_ENTITIES_COLUMN + relatedentity type +
- *     relatedentity id
- *
- * The indexed entity section contains a primary filter name and primary
- * filter value as the prefix. Within a given name/value, entire entity
- * entries are stored in the same format as described in the entity section
- * above (below, "key" represents any one of the possible entity entry keys
- * described above).
- *
- *   INDEXED_ENTRY_PREFIX + primaryfilter name + primaryfilter value +
- *     key
- */ -@InterfaceAudience.Private -@InterfaceStability.Unstable -public class LeveldbTimelineStore extends AbstractService - implements TimelineStore { - private static final Log LOG = LogFactory - .getLog(LeveldbTimelineStore.class); - - @Private - @VisibleForTesting - static final String FILENAME = "leveldb-timeline-store.ldb"; - - private static final byte[] START_TIME_LOOKUP_PREFIX = "k".getBytes(); - private static final byte[] ENTITY_ENTRY_PREFIX = "e".getBytes(); - private static final byte[] INDEXED_ENTRY_PREFIX = "i".getBytes(); - - private static final byte[] EVENTS_COLUMN = "e".getBytes(); - private static final byte[] PRIMARY_FILTERS_COLUMN = "f".getBytes(); - private static final byte[] OTHER_INFO_COLUMN = "i".getBytes(); - private static final byte[] RELATED_ENTITIES_COLUMN = "r".getBytes(); - private static final byte[] INVISIBLE_REVERSE_RELATED_ENTITIES_COLUMN = - "z".getBytes(); - - private static final byte[] EMPTY_BYTES = new byte[0]; - - @Private - @VisibleForTesting - static final FsPermission LEVELDB_DIR_UMASK = FsPermission - .createImmutable((short) 0700); - - private Map startTimeWriteCache; - private Map startTimeReadCache; - - /** - * Per-entity locks are obtained when writing. - */ - private final LockMap writeLocks = - new LockMap(); - - private final ReentrantReadWriteLock deleteLock = - new ReentrantReadWriteLock(); - - private DB db; - - private Thread deletionThread; - - public LeveldbTimelineStore() { - super(LeveldbTimelineStore.class.getName()); - } - - @Override - @SuppressWarnings("unchecked") - protected void serviceInit(Configuration conf) throws Exception { - Options options = new Options(); - options.createIfMissing(true); - options.cacheSize(conf.getLong( - YarnConfiguration.TIMELINE_SERVICE_LEVELDB_READ_CACHE_SIZE, - YarnConfiguration.DEFAULT_TIMELINE_SERVICE_LEVELDB_READ_CACHE_SIZE)); - JniDBFactory factory = new JniDBFactory(); - Path dbPath = new Path( - conf.get(YarnConfiguration.TIMELINE_SERVICE_LEVELDB_PATH), FILENAME); - FileSystem localFS = null; - try { - localFS = FileSystem.getLocal(conf); - if (!localFS.exists(dbPath)) { - if (!localFS.mkdirs(dbPath)) { - throw new IOException("Couldn't create directory for leveldb " + - "timeline store " + dbPath); - } - localFS.setPermission(dbPath, LEVELDB_DIR_UMASK); - } - } finally { - IOUtils.cleanup(LOG, localFS); - } - LOG.info("Using leveldb path " + dbPath); - db = factory.open(new File(dbPath.toString()), options); - startTimeWriteCache = - Collections.synchronizedMap(new LRUMap(getStartTimeWriteCacheSize( - conf))); - startTimeReadCache = - Collections.synchronizedMap(new LRUMap(getStartTimeReadCacheSize( - conf))); - - if (conf.getBoolean(YarnConfiguration.TIMELINE_SERVICE_TTL_ENABLE, true)) { - deletionThread = new EntityDeletionThread(conf); - deletionThread.start(); - } - - super.serviceInit(conf); - } - - @Override - protected void serviceStop() throws Exception { - if (deletionThread != null) { - deletionThread.interrupt(); - LOG.info("Waiting for deletion thread to complete its current action"); - try { - deletionThread.join(); - } catch (InterruptedException e) { - LOG.warn("Interrupted while waiting for deletion thread to complete," + - " closing db now", e); - } - } - IOUtils.cleanup(LOG, db); - super.serviceStop(); - } - - private static class StartAndInsertTime { - final long startTime; - final long insertTime; - - public StartAndInsertTime(long startTime, long insertTime) { - this.startTime = startTime; - this.insertTime = insertTime; - } - } - - private class 
EntityDeletionThread extends Thread { - private final long ttl; - private final long ttlInterval; - - public EntityDeletionThread(Configuration conf) { - ttl = conf.getLong(YarnConfiguration.TIMELINE_SERVICE_TTL_MS, - YarnConfiguration.DEFAULT_TIMELINE_SERVICE_TTL_MS); - ttlInterval = conf.getLong( - YarnConfiguration.TIMELINE_SERVICE_LEVELDB_TTL_INTERVAL_MS, - YarnConfiguration.DEFAULT_TIMELINE_SERVICE_LEVELDB_TTL_INTERVAL_MS); - LOG.info("Starting deletion thread with ttl " + ttl + " and cycle " + - "interval " + ttlInterval); - } - - @Override - public void run() { - while (true) { - long timestamp = System.currentTimeMillis() - ttl; - try { - discardOldEntities(timestamp); - Thread.sleep(ttlInterval); - } catch (IOException e) { - LOG.error(e); - } catch (InterruptedException e) { - LOG.info("Deletion thread received interrupt, exiting"); - break; - } - } - } - } - - private static class LockMap { - private static class CountingReentrantLock extends ReentrantLock { - private static final long serialVersionUID = 1L; - private int count; - private K key; - - CountingReentrantLock(K key) { - super(); - this.count = 0; - this.key = key; - } - } - - private Map> locks = - new HashMap>(); - - synchronized CountingReentrantLock getLock(K key) { - CountingReentrantLock lock = locks.get(key); - if (lock == null) { - lock = new CountingReentrantLock(key); - locks.put(key, lock); - } - - lock.count++; - return lock; - } - - synchronized void returnLock(CountingReentrantLock lock) { - if (lock.count == 0) { - throw new IllegalStateException("Returned lock more times than it " + - "was retrieved"); - } - lock.count--; - - if (lock.count == 0) { - locks.remove(lock.key); - } - } - } - - private static class KeyBuilder { - private static final int MAX_NUMBER_OF_KEY_ELEMENTS = 10; - private byte[][] b; - private boolean[] useSeparator; - private int index; - private int length; - - public KeyBuilder(int size) { - b = new byte[size][]; - useSeparator = new boolean[size]; - index = 0; - length = 0; - } - - public static KeyBuilder newInstance() { - return new KeyBuilder(MAX_NUMBER_OF_KEY_ELEMENTS); - } - - public KeyBuilder add(String s) { - return add(s.getBytes(), true); - } - - public KeyBuilder add(byte[] t) { - return add(t, false); - } - - public KeyBuilder add(byte[] t, boolean sep) { - b[index] = t; - useSeparator[index] = sep; - length += t.length; - if (sep) { - length++; - } - index++; - return this; - } - - public byte[] getBytes() throws IOException { - ByteArrayOutputStream baos = new ByteArrayOutputStream(length); - for (int i = 0; i < index; i++) { - baos.write(b[i]); - if (i < index-1 && useSeparator[i]) { - baos.write(0x0); - } - } - return baos.toByteArray(); - } - - public byte[] getBytesForLookup() throws IOException { - ByteArrayOutputStream baos = new ByteArrayOutputStream(length); - for (int i = 0; i < index; i++) { - baos.write(b[i]); - if (useSeparator[i]) { - baos.write(0x0); - } - } - return baos.toByteArray(); - } - } - - private static class KeyParser { - private final byte[] b; - private int offset; - - public KeyParser(byte[] b, int offset) { - this.b = b; - this.offset = offset; - } - - public String getNextString() throws IOException { - if (offset >= b.length) { - throw new IOException( - "tried to read nonexistent string from byte array"); - } - int i = 0; - while (offset+i < b.length && b[offset+i] != 0x0) { - i++; - } - String s = new String(b, offset, i); - offset = offset + i + 1; - return s; - } - - public long getNextLong() throws IOException { - if (offset+8 >= 
b.length) { - throw new IOException("byte array ran out when trying to read long"); - } - long l = readReverseOrderedLong(b, offset); - offset += 8; - return l; - } - - public int getOffset() { - return offset; - } - } - - @Override - public TimelineEntity getEntity(String entityId, String entityType, - EnumSet fields) throws IOException { - Long revStartTime = getStartTimeLong(entityId, entityType); - if (revStartTime == null) { - return null; - } - byte[] prefix = KeyBuilder.newInstance().add(ENTITY_ENTRY_PREFIX) - .add(entityType).add(writeReverseOrderedLong(revStartTime)) - .add(entityId).getBytesForLookup(); - - DBIterator iterator = null; - try { - iterator = db.iterator(); - iterator.seek(prefix); - - return getEntity(entityId, entityType, revStartTime, fields, iterator, - prefix, prefix.length); - } finally { - IOUtils.cleanup(LOG, iterator); - } - } - - /** - * Read entity from a db iterator. If no information is found in the - * specified fields for this entity, return null. - */ - private static TimelineEntity getEntity(String entityId, String entityType, - Long startTime, EnumSet fields, DBIterator iterator, - byte[] prefix, int prefixlen) throws IOException { - if (fields == null) { - fields = EnumSet.allOf(Field.class); - } - - TimelineEntity entity = new TimelineEntity(); - boolean events = false; - boolean lastEvent = false; - if (fields.contains(Field.EVENTS)) { - events = true; - } else if (fields.contains(Field.LAST_EVENT_ONLY)) { - lastEvent = true; - } else { - entity.setEvents(null); - } - boolean relatedEntities = false; - if (fields.contains(Field.RELATED_ENTITIES)) { - relatedEntities = true; - } else { - entity.setRelatedEntities(null); - } - boolean primaryFilters = false; - if (fields.contains(Field.PRIMARY_FILTERS)) { - primaryFilters = true; - } else { - entity.setPrimaryFilters(null); - } - boolean otherInfo = false; - if (fields.contains(Field.OTHER_INFO)) { - otherInfo = true; - } else { - entity.setOtherInfo(null); - } - - // iterate through the entity's entry, parsing information if it is part - // of a requested field - for (; iterator.hasNext(); iterator.next()) { - byte[] key = iterator.peekNext().getKey(); - if (!prefixMatches(prefix, prefixlen, key)) { - break; - } - if (key.length == prefixlen) { - continue; - } - if (key[prefixlen] == PRIMARY_FILTERS_COLUMN[0]) { - if (primaryFilters) { - addPrimaryFilter(entity, key, - prefixlen + PRIMARY_FILTERS_COLUMN.length); - } - } else if (key[prefixlen] == OTHER_INFO_COLUMN[0]) { - if (otherInfo) { - entity.addOtherInfo(parseRemainingKey(key, - prefixlen + OTHER_INFO_COLUMN.length), - GenericObjectMapper.read(iterator.peekNext().getValue())); - } - } else if (key[prefixlen] == RELATED_ENTITIES_COLUMN[0]) { - if (relatedEntities) { - addRelatedEntity(entity, key, - prefixlen + RELATED_ENTITIES_COLUMN.length); - } - } else if (key[prefixlen] == EVENTS_COLUMN[0]) { - if (events || (lastEvent && - entity.getEvents().size() == 0)) { - TimelineEvent event = getEntityEvent(null, key, prefixlen + - EVENTS_COLUMN.length, iterator.peekNext().getValue()); - if (event != null) { - entity.addEvent(event); - } - } - } else { - if (key[prefixlen] != - INVISIBLE_REVERSE_RELATED_ENTITIES_COLUMN[0]) { - LOG.warn(String.format("Found unexpected column for entity %s of " + - "type %s (0x%02x)", entityId, entityType, key[prefixlen])); - } - } - } - - entity.setEntityId(entityId); - entity.setEntityType(entityType); - entity.setStartTime(startTime); - - return entity; - } - - @Override - public TimelineEvents 
getEntityTimelines(String entityType, - SortedSet entityIds, Long limit, Long windowStart, - Long windowEnd, Set eventType) throws IOException { - TimelineEvents events = new TimelineEvents(); - if (entityIds == null || entityIds.isEmpty()) { - return events; - } - // create a lexicographically-ordered map from start time to entities - Map> startTimeMap = new TreeMap>(new Comparator() { - @Override - public int compare(byte[] o1, byte[] o2) { - return WritableComparator.compareBytes(o1, 0, o1.length, o2, 0, - o2.length); - } - }); - DBIterator iterator = null; - try { - // look up start times for the specified entities - // skip entities with no start time - for (String entityId : entityIds) { - byte[] startTime = getStartTime(entityId, entityType); - if (startTime != null) { - List entities = startTimeMap.get(startTime); - if (entities == null) { - entities = new ArrayList(); - startTimeMap.put(startTime, entities); - } - entities.add(new EntityIdentifier(entityId, entityType)); - } - } - for (Entry> entry : - startTimeMap.entrySet()) { - // look up the events matching the given parameters (limit, - // start time, end time, event types) for entities whose start times - // were found and add the entities to the return list - byte[] revStartTime = entry.getKey(); - for (EntityIdentifier entityIdentifier : entry.getValue()) { - EventsOfOneEntity entity = new EventsOfOneEntity(); - entity.setEntityId(entityIdentifier.getId()); - entity.setEntityType(entityType); - events.addEvent(entity); - KeyBuilder kb = KeyBuilder.newInstance().add(ENTITY_ENTRY_PREFIX) - .add(entityType).add(revStartTime).add(entityIdentifier.getId()) - .add(EVENTS_COLUMN); - byte[] prefix = kb.getBytesForLookup(); - if (windowEnd == null) { - windowEnd = Long.MAX_VALUE; - } - byte[] revts = writeReverseOrderedLong(windowEnd); - kb.add(revts); - byte[] first = kb.getBytesForLookup(); - byte[] last = null; - if (windowStart != null) { - last = KeyBuilder.newInstance().add(prefix) - .add(writeReverseOrderedLong(windowStart)).getBytesForLookup(); - } - if (limit == null) { - limit = DEFAULT_LIMIT; - } - iterator = db.iterator(); - for (iterator.seek(first); entity.getEvents().size() < limit && - iterator.hasNext(); iterator.next()) { - byte[] key = iterator.peekNext().getKey(); - if (!prefixMatches(prefix, prefix.length, key) || (last != null && - WritableComparator.compareBytes(key, 0, key.length, last, 0, - last.length) > 0)) { - break; - } - TimelineEvent event = getEntityEvent(eventType, key, prefix.length, - iterator.peekNext().getValue()); - if (event != null) { - entity.addEvent(event); - } - } - } - } - } finally { - IOUtils.cleanup(LOG, iterator); - } - return events; - } - - /** - * Returns true if the byte array begins with the specified prefix. 
- */ - private static boolean prefixMatches(byte[] prefix, int prefixlen, - byte[] b) { - if (b.length < prefixlen) { - return false; - } - return WritableComparator.compareBytes(prefix, 0, prefixlen, b, 0, - prefixlen) == 0; - } - - @Override - public TimelineEntities getEntities(String entityType, - Long limit, Long windowStart, Long windowEnd, String fromId, Long fromTs, - NameValuePair primaryFilter, Collection secondaryFilters, - EnumSet fields) throws IOException { - if (primaryFilter == null) { - // if no primary filter is specified, prefix the lookup with - // ENTITY_ENTRY_PREFIX - return getEntityByTime(ENTITY_ENTRY_PREFIX, entityType, limit, - windowStart, windowEnd, fromId, fromTs, secondaryFilters, fields); - } else { - // if a primary filter is specified, prefix the lookup with - // INDEXED_ENTRY_PREFIX + primaryFilterName + primaryFilterValue + - // ENTITY_ENTRY_PREFIX - byte[] base = KeyBuilder.newInstance().add(INDEXED_ENTRY_PREFIX) - .add(primaryFilter.getName()) - .add(GenericObjectMapper.write(primaryFilter.getValue()), true) - .add(ENTITY_ENTRY_PREFIX).getBytesForLookup(); - return getEntityByTime(base, entityType, limit, windowStart, windowEnd, - fromId, fromTs, secondaryFilters, fields); - } - } - - /** - * Retrieves a list of entities satisfying given parameters. - * - * @param base A byte array prefix for the lookup - * @param entityType The type of the entity - * @param limit A limit on the number of entities to return - * @param starttime The earliest entity start time to retrieve (exclusive) - * @param endtime The latest entity start time to retrieve (inclusive) - * @param fromId Retrieve entities starting with this entity - * @param fromTs Ignore entities with insert timestamp later than this ts - * @param secondaryFilters Filter pairs that the entities should match - * @param fields The set of fields to retrieve - * @return A list of entities - * @throws IOException - */ - private TimelineEntities getEntityByTime(byte[] base, - String entityType, Long limit, Long starttime, Long endtime, - String fromId, Long fromTs, Collection secondaryFilters, - EnumSet fields) throws IOException { - DBIterator iterator = null; - try { - KeyBuilder kb = KeyBuilder.newInstance().add(base).add(entityType); - // only db keys matching the prefix (base + entity type) will be parsed - byte[] prefix = kb.getBytesForLookup(); - if (endtime == null) { - // if end time is null, place no restriction on end time - endtime = Long.MAX_VALUE; - } - // construct a first key that will be seeked to using end time or fromId - byte[] first = null; - if (fromId != null) { - Long fromIdStartTime = getStartTimeLong(fromId, entityType); - if (fromIdStartTime == null) { - // no start time for provided id, so return empty entities - return new TimelineEntities(); - } - if (fromIdStartTime <= endtime) { - // if provided id's start time falls before the end of the window, - // use it to construct the seek key - first = kb.add(writeReverseOrderedLong(fromIdStartTime)) - .add(fromId).getBytesForLookup(); - } - } - // if seek key wasn't constructed using fromId, construct it using end ts - if (first == null) { - first = kb.add(writeReverseOrderedLong(endtime)).getBytesForLookup(); - } - byte[] last = null; - if (starttime != null) { - // if start time is not null, set a last key that will not be - // iterated past - last = KeyBuilder.newInstance().add(base).add(entityType) - .add(writeReverseOrderedLong(starttime)).getBytesForLookup(); - } - if (limit == null) { - // if limit is not specified, use the 
default - limit = DEFAULT_LIMIT; - } - - TimelineEntities entities = new TimelineEntities(); - iterator = db.iterator(); - iterator.seek(first); - // iterate until one of the following conditions is met: limit is - // reached, there are no more keys, the key prefix no longer matches, - // or a start time has been specified and reached/exceeded - while (entities.getEntities().size() < limit && iterator.hasNext()) { - byte[] key = iterator.peekNext().getKey(); - if (!prefixMatches(prefix, prefix.length, key) || (last != null && - WritableComparator.compareBytes(key, 0, key.length, last, 0, - last.length) > 0)) { - break; - } - // read the start time and entity id from the current key - KeyParser kp = new KeyParser(key, prefix.length); - Long startTime = kp.getNextLong(); - String entityId = kp.getNextString(); - - if (fromTs != null) { - long insertTime = readReverseOrderedLong(iterator.peekNext() - .getValue(), 0); - if (insertTime > fromTs) { - byte[] firstKey = key; - while (iterator.hasNext() && prefixMatches(firstKey, - kp.getOffset(), key)) { - iterator.next(); - key = iterator.peekNext().getKey(); - } - continue; - } - } - - // parse the entity that owns this key, iterating over all keys for - // the entity - TimelineEntity entity = getEntity(entityId, entityType, startTime, - fields, iterator, key, kp.getOffset()); - // determine if the retrieved entity matches the provided secondary - // filters, and if so add it to the list of entities to return - boolean filterPassed = true; - if (secondaryFilters != null) { - for (NameValuePair filter : secondaryFilters) { - Object v = entity.getOtherInfo().get(filter.getName()); - if (v == null) { - Set vs = entity.getPrimaryFilters() - .get(filter.getName()); - if (vs != null && !vs.contains(filter.getValue())) { - filterPassed = false; - break; - } - } else if (!v.equals(filter.getValue())) { - filterPassed = false; - break; - } - } - } - if (filterPassed) { - entities.addEntity(entity); - } - } - return entities; - } finally { - IOUtils.cleanup(LOG, iterator); - } - } - - /** - * Put a single entity. If there is an error, add a TimelinePutError to the - * given response. 
- */ - private void put(TimelineEntity entity, TimelinePutResponse response) { - LockMap.CountingReentrantLock lock = - writeLocks.getLock(new EntityIdentifier(entity.getEntityId(), - entity.getEntityType())); - lock.lock(); - WriteBatch writeBatch = null; - List relatedEntitiesWithoutStartTimes = - new ArrayList(); - byte[] revStartTime = null; - try { - writeBatch = db.createWriteBatch(); - List events = entity.getEvents(); - // look up the start time for the entity - StartAndInsertTime startAndInsertTime = getAndSetStartTime( - entity.getEntityId(), entity.getEntityType(), - entity.getStartTime(), events); - if (startAndInsertTime == null) { - // if no start time is found, add an error and return - TimelinePutError error = new TimelinePutError(); - error.setEntityId(entity.getEntityId()); - error.setEntityType(entity.getEntityType()); - error.setErrorCode(TimelinePutError.NO_START_TIME); - response.addError(error); - return; - } - revStartTime = writeReverseOrderedLong(startAndInsertTime - .startTime); - - Map> primaryFilters = entity.getPrimaryFilters(); - - // write entity marker - byte[] markerKey = createEntityMarkerKey(entity.getEntityId(), - entity.getEntityType(), revStartTime); - byte[] markerValue = writeReverseOrderedLong(startAndInsertTime - .insertTime); - writeBatch.put(markerKey, markerValue); - writePrimaryFilterEntries(writeBatch, primaryFilters, markerKey, - markerValue); - - // write event entries - if (events != null && !events.isEmpty()) { - for (TimelineEvent event : events) { - byte[] revts = writeReverseOrderedLong(event.getTimestamp()); - byte[] key = createEntityEventKey(entity.getEntityId(), - entity.getEntityType(), revStartTime, revts, - event.getEventType()); - byte[] value = GenericObjectMapper.write(event.getEventInfo()); - writeBatch.put(key, value); - writePrimaryFilterEntries(writeBatch, primaryFilters, key, value); - } - } - - // write related entity entries - Map> relatedEntities = - entity.getRelatedEntities(); - if (relatedEntities != null && !relatedEntities.isEmpty()) { - for (Entry> relatedEntityList : - relatedEntities.entrySet()) { - String relatedEntityType = relatedEntityList.getKey(); - for (String relatedEntityId : relatedEntityList.getValue()) { - // invisible "reverse" entries (entity -> related entity) - byte[] key = createReverseRelatedEntityKey(entity.getEntityId(), - entity.getEntityType(), revStartTime, relatedEntityId, - relatedEntityType); - writeBatch.put(key, EMPTY_BYTES); - // look up start time of related entity - byte[] relatedEntityStartTime = getStartTime(relatedEntityId, - relatedEntityType); - // delay writing the related entity if no start time is found - if (relatedEntityStartTime == null) { - relatedEntitiesWithoutStartTimes.add( - new EntityIdentifier(relatedEntityId, relatedEntityType)); - continue; - } - // write "forward" entry (related entity -> entity) - key = createRelatedEntityKey(relatedEntityId, - relatedEntityType, relatedEntityStartTime, - entity.getEntityId(), entity.getEntityType()); - writeBatch.put(key, EMPTY_BYTES); - } - } - } - - // write primary filter entries - if (primaryFilters != null && !primaryFilters.isEmpty()) { - for (Entry> primaryFilter : - primaryFilters.entrySet()) { - for (Object primaryFilterValue : primaryFilter.getValue()) { - byte[] key = createPrimaryFilterKey(entity.getEntityId(), - entity.getEntityType(), revStartTime, - primaryFilter.getKey(), primaryFilterValue); - writeBatch.put(key, EMPTY_BYTES); - writePrimaryFilterEntries(writeBatch, primaryFilters, key, - EMPTY_BYTES); - } 
- } - } - - // write other info entries - Map otherInfo = entity.getOtherInfo(); - if (otherInfo != null && !otherInfo.isEmpty()) { - for (Entry i : otherInfo.entrySet()) { - byte[] key = createOtherInfoKey(entity.getEntityId(), - entity.getEntityType(), revStartTime, i.getKey()); - byte[] value = GenericObjectMapper.write(i.getValue()); - writeBatch.put(key, value); - writePrimaryFilterEntries(writeBatch, primaryFilters, key, value); - } - } - db.write(writeBatch); - } catch (IOException e) { - LOG.error("Error putting entity " + entity.getEntityId() + - " of type " + entity.getEntityType(), e); - TimelinePutError error = new TimelinePutError(); - error.setEntityId(entity.getEntityId()); - error.setEntityType(entity.getEntityType()); - error.setErrorCode(TimelinePutError.IO_EXCEPTION); - response.addError(error); - } finally { - lock.unlock(); - writeLocks.returnLock(lock); - IOUtils.cleanup(LOG, writeBatch); - } - - for (EntityIdentifier relatedEntity : relatedEntitiesWithoutStartTimes) { - lock = writeLocks.getLock(relatedEntity); - lock.lock(); - try { - StartAndInsertTime relatedEntityStartAndInsertTime = - getAndSetStartTime(relatedEntity.getId(), relatedEntity.getType(), - readReverseOrderedLong(revStartTime, 0), null); - if (relatedEntityStartAndInsertTime == null) { - throw new IOException("Error setting start time for related entity"); - } - byte[] relatedEntityStartTime = writeReverseOrderedLong( - relatedEntityStartAndInsertTime.startTime); - db.put(createRelatedEntityKey(relatedEntity.getId(), - relatedEntity.getType(), relatedEntityStartTime, - entity.getEntityId(), entity.getEntityType()), EMPTY_BYTES); - db.put(createEntityMarkerKey(relatedEntity.getId(), - relatedEntity.getType(), relatedEntityStartTime), - writeReverseOrderedLong(relatedEntityStartAndInsertTime - .insertTime)); - } catch (IOException e) { - LOG.error("Error putting related entity " + relatedEntity.getId() + - " of type " + relatedEntity.getType() + " for entity " + - entity.getEntityId() + " of type " + entity.getEntityType(), e); - TimelinePutError error = new TimelinePutError(); - error.setEntityId(entity.getEntityId()); - error.setEntityType(entity.getEntityType()); - error.setErrorCode(TimelinePutError.IO_EXCEPTION); - response.addError(error); - } finally { - lock.unlock(); - writeLocks.returnLock(lock); - } - } - } - - /** - * For a given key / value pair that has been written to the db, - * write additional entries to the db for each primary filter. - */ - private static void writePrimaryFilterEntries(WriteBatch writeBatch, - Map> primaryFilters, byte[] key, byte[] value) - throws IOException { - if (primaryFilters != null && !primaryFilters.isEmpty()) { - for (Entry> pf : primaryFilters.entrySet()) { - for (Object pfval : pf.getValue()) { - writeBatch.put(addPrimaryFilterToKey(pf.getKey(), pfval, - key), value); - } - } - } - } - - @Override - public TimelinePutResponse put(TimelineEntities entities) { - try { - deleteLock.readLock().lock(); - TimelinePutResponse response = new TimelinePutResponse(); - for (TimelineEntity entity : entities.getEntities()) { - put(entity, response); - } - return response; - } finally { - deleteLock.readLock().unlock(); - } - } - - /** - * Get the unique start time for a given entity as a byte array that sorts - * the timestamps in reverse order (see {@link - * GenericObjectMapper#writeReverseOrderedLong(long)}). 
- * - * @param entityId The id of the entity - * @param entityType The type of the entity - * @return A byte array, null if not found - * @throws IOException - */ - private byte[] getStartTime(String entityId, String entityType) - throws IOException { - Long l = getStartTimeLong(entityId, entityType); - return l == null ? null : writeReverseOrderedLong(l); - } - - /** - * Get the unique start time for a given entity as a Long. - * - * @param entityId The id of the entity - * @param entityType The type of the entity - * @return A Long, null if not found - * @throws IOException - */ - private Long getStartTimeLong(String entityId, String entityType) - throws IOException { - EntityIdentifier entity = new EntityIdentifier(entityId, entityType); - // start time is not provided, so try to look it up - if (startTimeReadCache.containsKey(entity)) { - // found the start time in the cache - return startTimeReadCache.get(entity); - } else { - // try to look up the start time in the db - byte[] b = createStartTimeLookupKey(entity.getId(), entity.getType()); - byte[] v = db.get(b); - if (v == null) { - // did not find the start time in the db - return null; - } else { - // found the start time in the db - Long l = readReverseOrderedLong(v, 0); - startTimeReadCache.put(entity, l); - return l; - } - } - } - - /** - * Get the unique start time for a given entity as a byte array that sorts - * the timestamps in reverse order (see {@link - * GenericObjectMapper#writeReverseOrderedLong(long)}). If the start time - * doesn't exist, set it based on the information provided. Should only be - * called when a lock has been obtained on the entity. - * - * @param entityId The id of the entity - * @param entityType The type of the entity - * @param startTime The start time of the entity, or null - * @param events A list of events for the entity, or null - * @return A StartAndInsertTime - * @throws IOException - */ - private StartAndInsertTime getAndSetStartTime(String entityId, - String entityType, Long startTime, List events) - throws IOException { - EntityIdentifier entity = new EntityIdentifier(entityId, entityType); - if (startTime == null) { - // start time is not provided, so try to look it up - if (startTimeWriteCache.containsKey(entity)) { - // found the start time in the cache - return startTimeWriteCache.get(entity); - } else { - if (events != null) { - // prepare a start time from events in case it is needed - Long min = Long.MAX_VALUE; - for (TimelineEvent e : events) { - if (min > e.getTimestamp()) { - min = e.getTimestamp(); - } - } - startTime = min; - } - return checkStartTimeInDb(entity, startTime); - } - } else { - // start time is provided - if (startTimeWriteCache.containsKey(entity)) { - // always use start time from cache if it exists - return startTimeWriteCache.get(entity); - } else { - // check the provided start time matches the db - return checkStartTimeInDb(entity, startTime); - } - } - } - - /** - * Checks db for start time and returns it if it exists. If it doesn't - * exist, writes the suggested start time (if it is not null). This is - * only called when the start time is not found in the cache, - * so it adds it back into the cache if it is found. Should only be called - * when a lock has been obtained on the entity. 
- */ - private StartAndInsertTime checkStartTimeInDb(EntityIdentifier entity, - Long suggestedStartTime) throws IOException { - StartAndInsertTime startAndInsertTime = null; - // create lookup key for start time - byte[] b = createStartTimeLookupKey(entity.getId(), entity.getType()); - // retrieve value for key - byte[] v = db.get(b); - if (v == null) { - // start time doesn't exist in db - if (suggestedStartTime == null) { - return null; - } - startAndInsertTime = new StartAndInsertTime(suggestedStartTime, - System.currentTimeMillis()); - - // write suggested start time - v = new byte[16]; - writeReverseOrderedLong(suggestedStartTime, v, 0); - writeReverseOrderedLong(startAndInsertTime.insertTime, v, 8); - WriteOptions writeOptions = new WriteOptions(); - writeOptions.sync(true); - db.put(b, v, writeOptions); - } else { - // found start time in db, so ignore suggested start time - startAndInsertTime = new StartAndInsertTime(readReverseOrderedLong(v, 0), - readReverseOrderedLong(v, 8)); - } - startTimeWriteCache.put(entity, startAndInsertTime); - startTimeReadCache.put(entity, startAndInsertTime.startTime); - return startAndInsertTime; - } - - /** - * Creates a key for looking up the start time of a given entity, - * of the form START_TIME_LOOKUP_PREFIX + entity type + entity id. - */ - private static byte[] createStartTimeLookupKey(String entityId, - String entityType) throws IOException { - return KeyBuilder.newInstance().add(START_TIME_LOOKUP_PREFIX) - .add(entityType).add(entityId).getBytes(); - } - - /** - * Creates an entity marker, serializing ENTITY_ENTRY_PREFIX + entity type + - * revstarttime + entity id. - */ - private static byte[] createEntityMarkerKey(String entityId, - String entityType, byte[] revStartTime) throws IOException { - return KeyBuilder.newInstance().add(ENTITY_ENTRY_PREFIX) - .add(entityType).add(revStartTime).add(entityId).getBytesForLookup(); - } - - /** - * Creates an index entry for the given key of the form - * INDEXED_ENTRY_PREFIX + primaryfiltername + primaryfiltervalue + key. - */ - private static byte[] addPrimaryFilterToKey(String primaryFilterName, - Object primaryFilterValue, byte[] key) throws IOException { - return KeyBuilder.newInstance().add(INDEXED_ENTRY_PREFIX) - .add(primaryFilterName) - .add(GenericObjectMapper.write(primaryFilterValue), true).add(key) - .getBytes(); - } - - /** - * Creates an event key, serializing ENTITY_ENTRY_PREFIX + entity type + - * revstarttime + entity id + EVENTS_COLUMN + reveventtimestamp + event type. - */ - private static byte[] createEntityEventKey(String entityId, - String entityType, byte[] revStartTime, byte[] revEventTimestamp, - String eventType) throws IOException { - return KeyBuilder.newInstance().add(ENTITY_ENTRY_PREFIX) - .add(entityType).add(revStartTime).add(entityId).add(EVENTS_COLUMN) - .add(revEventTimestamp).add(eventType).getBytes(); - } - - /** - * Creates an event object from the given key, offset, and value. If the - * event type is not contained in the specified set of event types, - * returns null. 
- */ - private static TimelineEvent getEntityEvent(Set eventTypes, - byte[] key, int offset, byte[] value) throws IOException { - KeyParser kp = new KeyParser(key, offset); - long ts = kp.getNextLong(); - String tstype = kp.getNextString(); - if (eventTypes == null || eventTypes.contains(tstype)) { - TimelineEvent event = new TimelineEvent(); - event.setTimestamp(ts); - event.setEventType(tstype); - Object o = GenericObjectMapper.read(value); - if (o == null) { - event.setEventInfo(null); - } else if (o instanceof Map) { - @SuppressWarnings("unchecked") - Map m = (Map) o; - event.setEventInfo(m); - } else { - throw new IOException("Couldn't deserialize event info map"); - } - return event; - } - return null; - } - - /** - * Creates a primary filter key, serializing ENTITY_ENTRY_PREFIX + - * entity type + revstarttime + entity id + PRIMARY_FILTERS_COLUMN + name + - * value. - */ - private static byte[] createPrimaryFilterKey(String entityId, - String entityType, byte[] revStartTime, String name, Object value) - throws IOException { - return KeyBuilder.newInstance().add(ENTITY_ENTRY_PREFIX).add(entityType) - .add(revStartTime).add(entityId).add(PRIMARY_FILTERS_COLUMN).add(name) - .add(GenericObjectMapper.write(value)).getBytes(); - } - - /** - * Parses the primary filter from the given key at the given offset and - * adds it to the given entity. - */ - private static void addPrimaryFilter(TimelineEntity entity, byte[] key, - int offset) throws IOException { - KeyParser kp = new KeyParser(key, offset); - String name = kp.getNextString(); - Object value = GenericObjectMapper.read(key, kp.getOffset()); - entity.addPrimaryFilter(name, value); - } - - /** - * Creates an other info key, serializing ENTITY_ENTRY_PREFIX + entity type + - * revstarttime + entity id + OTHER_INFO_COLUMN + name. - */ - private static byte[] createOtherInfoKey(String entityId, String entityType, - byte[] revStartTime, String name) throws IOException { - return KeyBuilder.newInstance().add(ENTITY_ENTRY_PREFIX).add(entityType) - .add(revStartTime).add(entityId).add(OTHER_INFO_COLUMN).add(name) - .getBytes(); - } - - /** - * Creates a string representation of the byte array from the given offset - * to the end of the array (for parsing other info keys). - */ - private static String parseRemainingKey(byte[] b, int offset) { - return new String(b, offset, b.length - offset); - } - - /** - * Creates a related entity key, serializing ENTITY_ENTRY_PREFIX + - * entity type + revstarttime + entity id + RELATED_ENTITIES_COLUMN + - * relatedentity type + relatedentity id. - */ - private static byte[] createRelatedEntityKey(String entityId, - String entityType, byte[] revStartTime, String relatedEntityId, - String relatedEntityType) throws IOException { - return KeyBuilder.newInstance().add(ENTITY_ENTRY_PREFIX).add(entityType) - .add(revStartTime).add(entityId).add(RELATED_ENTITIES_COLUMN) - .add(relatedEntityType).add(relatedEntityId).getBytes(); - } - - /** - * Parses the related entity from the given key at the given offset and - * adds it to the given entity. 
- */ - private static void addRelatedEntity(TimelineEntity entity, byte[] key, - int offset) throws IOException { - KeyParser kp = new KeyParser(key, offset); - String type = kp.getNextString(); - String id = kp.getNextString(); - entity.addRelatedEntity(type, id); - } - - /** - * Creates a reverse related entity key, serializing ENTITY_ENTRY_PREFIX + - * entity type + revstarttime + entity id + - * INVISIBLE_REVERSE_RELATED_ENTITIES_COLUMN + - * relatedentity type + relatedentity id. - */ - private static byte[] createReverseRelatedEntityKey(String entityId, - String entityType, byte[] revStartTime, String relatedEntityId, - String relatedEntityType) throws IOException { - return KeyBuilder.newInstance().add(ENTITY_ENTRY_PREFIX).add(entityType) - .add(revStartTime).add(entityId) - .add(INVISIBLE_REVERSE_RELATED_ENTITIES_COLUMN) - .add(relatedEntityType).add(relatedEntityId).getBytes(); - } - - /** - * Clears the cache to test reloading start times from leveldb (only for - * testing). - */ - @VisibleForTesting - void clearStartTimeCache() { - startTimeWriteCache.clear(); - startTimeReadCache.clear(); - } - - @VisibleForTesting - static int getStartTimeReadCacheSize(Configuration conf) { - return conf.getInt( - YarnConfiguration.TIMELINE_SERVICE_LEVELDB_START_TIME_READ_CACHE_SIZE, - YarnConfiguration. - DEFAULT_TIMELINE_SERVICE_LEVELDB_START_TIME_READ_CACHE_SIZE); - } - - @VisibleForTesting - static int getStartTimeWriteCacheSize(Configuration conf) { - return conf.getInt( - YarnConfiguration.TIMELINE_SERVICE_LEVELDB_START_TIME_WRITE_CACHE_SIZE, - YarnConfiguration. - DEFAULT_TIMELINE_SERVICE_LEVELDB_START_TIME_WRITE_CACHE_SIZE); - } - - // warning is suppressed to prevent eclipse from noting unclosed resource - @SuppressWarnings("resource") - @VisibleForTesting - List getEntityTypes() throws IOException { - DBIterator iterator = null; - try { - iterator = getDbIterator(false); - List entityTypes = new ArrayList(); - iterator.seek(ENTITY_ENTRY_PREFIX); - while (iterator.hasNext()) { - byte[] key = iterator.peekNext().getKey(); - if (key[0] != ENTITY_ENTRY_PREFIX[0]) { - break; - } - KeyParser kp = new KeyParser(key, - ENTITY_ENTRY_PREFIX.length); - String entityType = kp.getNextString(); - entityTypes.add(entityType); - byte[] lookupKey = KeyBuilder.newInstance().add(ENTITY_ENTRY_PREFIX) - .add(entityType).getBytesForLookup(); - if (lookupKey[lookupKey.length - 1] != 0x0) { - throw new IOException("Found unexpected end byte in lookup key"); - } - lookupKey[lookupKey.length - 1] = 0x1; - iterator.seek(lookupKey); - } - return entityTypes; - } finally { - IOUtils.cleanup(LOG, iterator); - } - } - - /** - * Finds all keys in the db that have a given prefix and deletes them on - * the given write batch. 
- */ - private void deleteKeysWithPrefix(WriteBatch writeBatch, byte[] prefix, - DBIterator iterator) { - for (iterator.seek(prefix); iterator.hasNext(); iterator.next()) { - byte[] key = iterator.peekNext().getKey(); - if (!prefixMatches(prefix, prefix.length, key)) { - break; - } - writeBatch.delete(key); - } - } - - @VisibleForTesting - boolean deleteNextEntity(String entityType, byte[] reverseTimestamp, - DBIterator iterator, DBIterator pfIterator, boolean seeked) - throws IOException { - WriteBatch writeBatch = null; - try { - KeyBuilder kb = KeyBuilder.newInstance().add(ENTITY_ENTRY_PREFIX) - .add(entityType); - byte[] typePrefix = kb.getBytesForLookup(); - kb.add(reverseTimestamp); - if (!seeked) { - iterator.seek(kb.getBytesForLookup()); - } - if (!iterator.hasNext()) { - return false; - } - byte[] entityKey = iterator.peekNext().getKey(); - if (!prefixMatches(typePrefix, typePrefix.length, entityKey)) { - return false; - } - - // read the start time and entity id from the current key - KeyParser kp = new KeyParser(entityKey, typePrefix.length + 8); - String entityId = kp.getNextString(); - int prefixlen = kp.getOffset(); - byte[] deletePrefix = new byte[prefixlen]; - System.arraycopy(entityKey, 0, deletePrefix, 0, prefixlen); - - writeBatch = db.createWriteBatch(); - - if (LOG.isDebugEnabled()) { - LOG.debug("Deleting entity type:" + entityType + " id:" + entityId); - } - // remove start time from cache and db - writeBatch.delete(createStartTimeLookupKey(entityId, entityType)); - EntityIdentifier entityIdentifier = - new EntityIdentifier(entityId, entityType); - startTimeReadCache.remove(entityIdentifier); - startTimeWriteCache.remove(entityIdentifier); - - // delete current entity - for (; iterator.hasNext(); iterator.next()) { - byte[] key = iterator.peekNext().getKey(); - if (!prefixMatches(entityKey, prefixlen, key)) { - break; - } - writeBatch.delete(key); - - if (key.length == prefixlen) { - continue; - } - if (key[prefixlen] == PRIMARY_FILTERS_COLUMN[0]) { - kp = new KeyParser(key, - prefixlen + PRIMARY_FILTERS_COLUMN.length); - String name = kp.getNextString(); - Object value = GenericObjectMapper.read(key, kp.getOffset()); - deleteKeysWithPrefix(writeBatch, addPrimaryFilterToKey(name, value, - deletePrefix), pfIterator); - if (LOG.isDebugEnabled()) { - LOG.debug("Deleting entity type:" + entityType + " id:" + - entityId + " primary filter entry " + name + " " + - value); - } - } else if (key[prefixlen] == RELATED_ENTITIES_COLUMN[0]) { - kp = new KeyParser(key, - prefixlen + RELATED_ENTITIES_COLUMN.length); - String type = kp.getNextString(); - String id = kp.getNextString(); - byte[] relatedEntityStartTime = getStartTime(id, type); - if (relatedEntityStartTime == null) { - LOG.warn("Found no start time for " + - "related entity " + id + " of type " + type + " while " + - "deleting " + entityId + " of type " + entityType); - continue; - } - writeBatch.delete(createReverseRelatedEntityKey(id, type, - relatedEntityStartTime, entityId, entityType)); - if (LOG.isDebugEnabled()) { - LOG.debug("Deleting entity type:" + entityType + " id:" + - entityId + " from invisible reverse related entity " + - "entry of type:" + type + " id:" + id); - } - } else if (key[prefixlen] == - INVISIBLE_REVERSE_RELATED_ENTITIES_COLUMN[0]) { - kp = new KeyParser(key, prefixlen + - INVISIBLE_REVERSE_RELATED_ENTITIES_COLUMN.length); - String type = kp.getNextString(); - String id = kp.getNextString(); - byte[] relatedEntityStartTime = getStartTime(id, type); - if (relatedEntityStartTime == null) { - 
LOG.warn("Found no start time for reverse " + - "related entity " + id + " of type " + type + " while " + - "deleting " + entityId + " of type " + entityType); - continue; - } - writeBatch.delete(createRelatedEntityKey(id, type, - relatedEntityStartTime, entityId, entityType)); - if (LOG.isDebugEnabled()) { - LOG.debug("Deleting entity type:" + entityType + " id:" + - entityId + " from related entity entry of type:" + - type + " id:" + id); - } - } - } - WriteOptions writeOptions = new WriteOptions(); - writeOptions.sync(true); - db.write(writeBatch, writeOptions); - return true; - } finally { - IOUtils.cleanup(LOG, writeBatch); - } - } - - /** - * Discards entities with start timestamp less than or equal to the given - * timestamp. - */ - @VisibleForTesting - void discardOldEntities(long timestamp) - throws IOException, InterruptedException { - byte[] reverseTimestamp = writeReverseOrderedLong(timestamp); - long totalCount = 0; - long t1 = System.currentTimeMillis(); - try { - List entityTypes = getEntityTypes(); - for (String entityType : entityTypes) { - DBIterator iterator = null; - DBIterator pfIterator = null; - long typeCount = 0; - try { - deleteLock.writeLock().lock(); - iterator = getDbIterator(false); - pfIterator = getDbIterator(false); - - if (deletionThread != null && deletionThread.isInterrupted()) { - throw new InterruptedException(); - } - boolean seeked = false; - while (deleteNextEntity(entityType, reverseTimestamp, iterator, - pfIterator, seeked)) { - typeCount++; - totalCount++; - seeked = true; - if (deletionThread != null && deletionThread.isInterrupted()) { - throw new InterruptedException(); - } - } - } catch (IOException e) { - LOG.error("Got IOException while deleting entities for type " + - entityType + ", continuing to next type", e); - } finally { - IOUtils.cleanup(LOG, iterator, pfIterator); - deleteLock.writeLock().unlock(); - if (typeCount > 0) { - LOG.info("Deleted " + typeCount + " entities of type " + - entityType); - } - } - } - } finally { - long t2 = System.currentTimeMillis(); - LOG.info("Discarded " + totalCount + " entities for timestamp " + - timestamp + " and earlier in " + (t2 - t1) / 1000.0 + " seconds"); - } - } - - @VisibleForTesting - DBIterator getDbIterator(boolean fillCache) { - ReadOptions readOptions = new ReadOptions(); - readOptions.fillCache(fillCache); - return db.iterator(readOptions); - } -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/MemoryTimelineStore.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/MemoryTimelineStore.java deleted file mode 100644 index 06f3d60..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/MemoryTimelineStore.java +++ /dev/null @@ -1,373 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.EnumSet; -import java.util.HashMap; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.PriorityQueue; -import java.util.Set; -import java.util.SortedSet; -import java.util.TreeSet; - -import org.apache.hadoop.classification.InterfaceAudience.Private; -import org.apache.hadoop.classification.InterfaceStability.Unstable; -import org.apache.hadoop.service.AbstractService; -import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities; -import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity; -import org.apache.hadoop.yarn.api.records.timeline.TimelineEvent; -import org.apache.hadoop.yarn.api.records.timeline.TimelineEvents; -import org.apache.hadoop.yarn.api.records.timeline.TimelineEvents.EventsOfOneEntity; -import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse; -import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse.TimelinePutError; - -/** - * In-memory implementation of {@link TimelineStore}. This - * implementation is for test purpose only. If users improperly instantiate it, - * they may encounter reading and writing history data in different memory - * store. 
- * - */ -@Private -@Unstable -public class MemoryTimelineStore - extends AbstractService implements TimelineStore { - - private Map entities = - new HashMap(); - private Map entityInsertTimes = - new HashMap(); - - public MemoryTimelineStore() { - super(MemoryTimelineStore.class.getName()); - } - - @Override - public TimelineEntities getEntities(String entityType, Long limit, - Long windowStart, Long windowEnd, String fromId, Long fromTs, - NameValuePair primaryFilter, Collection secondaryFilters, - EnumSet fields) { - if (limit == null) { - limit = DEFAULT_LIMIT; - } - if (windowStart == null) { - windowStart = Long.MIN_VALUE; - } - if (windowEnd == null) { - windowEnd = Long.MAX_VALUE; - } - if (fields == null) { - fields = EnumSet.allOf(Field.class); - } - - Iterator entityIterator = null; - if (fromId != null) { - TimelineEntity firstEntity = entities.get(new EntityIdentifier(fromId, - entityType)); - if (firstEntity == null) { - return new TimelineEntities(); - } else { - entityIterator = new TreeSet(entities.values()) - .tailSet(firstEntity, true).iterator(); - } - } - if (entityIterator == null) { - entityIterator = new PriorityQueue(entities.values()) - .iterator(); - } - - List entitiesSelected = new ArrayList(); - while (entityIterator.hasNext()) { - TimelineEntity entity = entityIterator.next(); - if (entitiesSelected.size() >= limit) { - break; - } - if (!entity.getEntityType().equals(entityType)) { - continue; - } - if (entity.getStartTime() <= windowStart) { - continue; - } - if (entity.getStartTime() > windowEnd) { - continue; - } - if (fromTs != null && entityInsertTimes.get(new EntityIdentifier( - entity.getEntityId(), entity.getEntityType())) > fromTs) { - continue; - } - if (primaryFilter != null && - !matchPrimaryFilter(entity.getPrimaryFilters(), primaryFilter)) { - continue; - } - if (secondaryFilters != null) { // AND logic - boolean flag = true; - for (NameValuePair secondaryFilter : secondaryFilters) { - if (secondaryFilter != null && !matchPrimaryFilter( - entity.getPrimaryFilters(), secondaryFilter) && - !matchFilter(entity.getOtherInfo(), secondaryFilter)) { - flag = false; - break; - } - } - if (!flag) { - continue; - } - } - entitiesSelected.add(entity); - } - List entitiesToReturn = new ArrayList(); - for (TimelineEntity entitySelected : entitiesSelected) { - entitiesToReturn.add(maskFields(entitySelected, fields)); - } - Collections.sort(entitiesToReturn); - TimelineEntities entitiesWrapper = new TimelineEntities(); - entitiesWrapper.setEntities(entitiesToReturn); - return entitiesWrapper; - } - - @Override - public TimelineEntity getEntity(String entityId, String entityType, - EnumSet fieldsToRetrieve) { - if (fieldsToRetrieve == null) { - fieldsToRetrieve = EnumSet.allOf(Field.class); - } - TimelineEntity entity = entities.get(new EntityIdentifier(entityId, entityType)); - if (entity == null) { - return null; - } else { - return maskFields(entity, fieldsToRetrieve); - } - } - - @Override - public TimelineEvents getEntityTimelines(String entityType, - SortedSet entityIds, Long limit, Long windowStart, - Long windowEnd, - Set eventTypes) { - TimelineEvents allEvents = new TimelineEvents(); - if (entityIds == null) { - return allEvents; - } - if (limit == null) { - limit = DEFAULT_LIMIT; - } - if (windowStart == null) { - windowStart = Long.MIN_VALUE; - } - if (windowEnd == null) { - windowEnd = Long.MAX_VALUE; - } - for (String entityId : entityIds) { - EntityIdentifier entityID = new EntityIdentifier(entityId, entityType); - TimelineEntity entity = 
entities.get(entityID); - if (entity == null) { - continue; - } - EventsOfOneEntity events = new EventsOfOneEntity(); - events.setEntityId(entityId); - events.setEntityType(entityType); - for (TimelineEvent event : entity.getEvents()) { - if (events.getEvents().size() >= limit) { - break; - } - if (event.getTimestamp() <= windowStart) { - continue; - } - if (event.getTimestamp() > windowEnd) { - continue; - } - if (eventTypes != null && !eventTypes.contains(event.getEventType())) { - continue; - } - events.addEvent(event); - } - allEvents.addEvent(events); - } - return allEvents; - } - - @Override - public TimelinePutResponse put(TimelineEntities data) { - TimelinePutResponse response = new TimelinePutResponse(); - for (TimelineEntity entity : data.getEntities()) { - EntityIdentifier entityId = - new EntityIdentifier(entity.getEntityId(), entity.getEntityType()); - // store entity info in memory - TimelineEntity existingEntity = entities.get(entityId); - if (existingEntity == null) { - existingEntity = new TimelineEntity(); - existingEntity.setEntityId(entity.getEntityId()); - existingEntity.setEntityType(entity.getEntityType()); - existingEntity.setStartTime(entity.getStartTime()); - entities.put(entityId, existingEntity); - entityInsertTimes.put(entityId, System.currentTimeMillis()); - } - if (entity.getEvents() != null) { - if (existingEntity.getEvents() == null) { - existingEntity.setEvents(entity.getEvents()); - } else { - existingEntity.addEvents(entity.getEvents()); - } - Collections.sort(existingEntity.getEvents()); - } - // check startTime - if (existingEntity.getStartTime() == null) { - if (existingEntity.getEvents() == null - || existingEntity.getEvents().isEmpty()) { - TimelinePutError error = new TimelinePutError(); - error.setEntityId(entityId.getId()); - error.setEntityType(entityId.getType()); - error.setErrorCode(TimelinePutError.NO_START_TIME); - response.addError(error); - entities.remove(entityId); - entityInsertTimes.remove(entityId); - continue; - } else { - Long min = Long.MAX_VALUE; - for (TimelineEvent e : entity.getEvents()) { - if (min > e.getTimestamp()) { - min = e.getTimestamp(); - } - } - existingEntity.setStartTime(min); - } - } - if (entity.getPrimaryFilters() != null) { - if (existingEntity.getPrimaryFilters() == null) { - existingEntity.setPrimaryFilters(new HashMap>()); - } - for (Entry> pf : - entity.getPrimaryFilters().entrySet()) { - for (Object pfo : pf.getValue()) { - existingEntity.addPrimaryFilter(pf.getKey(), maybeConvert(pfo)); - } - } - } - if (entity.getOtherInfo() != null) { - if (existingEntity.getOtherInfo() == null) { - existingEntity.setOtherInfo(new HashMap()); - } - for (Entry info : entity.getOtherInfo().entrySet()) { - existingEntity.addOtherInfo(info.getKey(), - maybeConvert(info.getValue())); - } - } - // relate it to other entities - if (entity.getRelatedEntities() == null) { - continue; - } - for (Map.Entry> partRelatedEntities : entity - .getRelatedEntities().entrySet()) { - if (partRelatedEntities == null) { - continue; - } - for (String idStr : partRelatedEntities.getValue()) { - EntityIdentifier relatedEntityId = - new EntityIdentifier(idStr, partRelatedEntities.getKey()); - TimelineEntity relatedEntity = entities.get(relatedEntityId); - if (relatedEntity != null) { - relatedEntity.addRelatedEntity( - existingEntity.getEntityType(), existingEntity.getEntityId()); - } else { - relatedEntity = new TimelineEntity(); - relatedEntity.setEntityId(relatedEntityId.getId()); - relatedEntity.setEntityType(relatedEntityId.getType()); - 
relatedEntity.setStartTime(existingEntity.getStartTime()); - relatedEntity.addRelatedEntity(existingEntity.getEntityType(), - existingEntity.getEntityId()); - entities.put(relatedEntityId, relatedEntity); - entityInsertTimes.put(relatedEntityId, System.currentTimeMillis()); - } - } - } - } - return response; - } - - private static TimelineEntity maskFields( - TimelineEntity entity, EnumSet fields) { - // Conceal the fields that are not going to be exposed - TimelineEntity entityToReturn = new TimelineEntity(); - entityToReturn.setEntityId(entity.getEntityId()); - entityToReturn.setEntityType(entity.getEntityType()); - entityToReturn.setStartTime(entity.getStartTime()); - // Deep copy - if (fields.contains(Field.EVENTS)) { - entityToReturn.addEvents(entity.getEvents()); - } else if (fields.contains(Field.LAST_EVENT_ONLY)) { - entityToReturn.addEvent(entity.getEvents().get(0)); - } else { - entityToReturn.setEvents(null); - } - if (fields.contains(Field.RELATED_ENTITIES)) { - entityToReturn.addRelatedEntities(entity.getRelatedEntities()); - } else { - entityToReturn.setRelatedEntities(null); - } - if (fields.contains(Field.PRIMARY_FILTERS)) { - entityToReturn.addPrimaryFilters(entity.getPrimaryFilters()); - } else { - entityToReturn.setPrimaryFilters(null); - } - if (fields.contains(Field.OTHER_INFO)) { - entityToReturn.addOtherInfo(entity.getOtherInfo()); - } else { - entityToReturn.setOtherInfo(null); - } - return entityToReturn; - } - - private static boolean matchFilter(Map tags, - NameValuePair filter) { - Object value = tags.get(filter.getName()); - if (value == null) { // doesn't have the filter - return false; - } else if (!value.equals(filter.getValue())) { // doesn't match the filter - return false; - } - return true; - } - - private static boolean matchPrimaryFilter(Map> tags, - NameValuePair filter) { - Set value = tags.get(filter.getName()); - if (value == null) { // doesn't have the filter - return false; - } else { - return value.contains(filter.getValue()); - } - } - - private static Object maybeConvert(Object o) { - if (o instanceof Long) { - Long l = (Long)o; - if (l >= Integer.MIN_VALUE && l <= Integer.MAX_VALUE) { - return l.intValue(); - } - } - return o; - } - -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/NameValuePair.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/NameValuePair.java deleted file mode 100644 index d8dabd2..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/NameValuePair.java +++ /dev/null @@ -1,59 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
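
MemoryTimelineStore (also moved to org.apache.hadoop.yarn.server.timeline by this patch) is intended for tests only, so the typical usage is to init/start it, put a small batch and read it back. A minimal sketch under that assumption (entity type and id are made up):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse;
import org.apache.hadoop.yarn.server.timeline.MemoryTimelineStore;

public class MemoryStoreSketch {
  public static void main(String[] args) throws Exception {
    MemoryTimelineStore store = new MemoryTimelineStore();
    store.init(new Configuration());
    store.start();

    TimelineEntity entity = new TimelineEntity();
    entity.setEntityType("TEST_TYPE");               // made-up type
    entity.setEntityId("entity_1");                  // made-up id
    entity.setStartTime(System.currentTimeMillis()); // without events, a missing start time makes put() report NO_START_TIME

    TimelineEntities batch = new TimelineEntities();
    batch.addEntity(entity);
    TimelinePutResponse response = store.put(batch);
    // response.getErrors() is empty when the entity was stored

    TimelineEntity stored = store.getEntity("entity_1", "TEST_TYPE", null);
    // null fieldsToRetrieve means "all fields" per the TimelineReader contract
    System.out.println(stored.getEntityId());

    store.stop();
  }
}
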
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline; - -import org.apache.hadoop.classification.InterfaceAudience; -import org.apache.hadoop.classification.InterfaceStability; - -/** - * A class holding a name and value pair, used for specifying filters in - * {@link TimelineReader}. - */ -@InterfaceAudience.Private -@InterfaceStability.Unstable -public class NameValuePair { - String name; - Object value; - - public NameValuePair(String name, Object value) { - this.name = name; - this.value = value; - } - - /** - * Get the name. - * @return The name. - */ - public String getName() { - - return name; - } - - /** - * Get the value. - * @return The value. - */ - public Object getValue() { - return value; - } - - @Override - public String toString() { - return "{ name: " + name + ", value: " + value + " }"; - } -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TimelineReader.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TimelineReader.java deleted file mode 100644 index 9ae9954..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TimelineReader.java +++ /dev/null @@ -1,155 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline; - -import java.io.IOException; -import java.util.Collection; -import java.util.EnumSet; -import java.util.Set; -import java.util.SortedSet; - -import org.apache.hadoop.classification.InterfaceAudience; -import org.apache.hadoop.classification.InterfaceStability; -import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities; -import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity; -import org.apache.hadoop.yarn.api.records.timeline.TimelineEvents; - -/** - * This interface is for retrieving timeline information. 
- */ -@InterfaceAudience.Private -@InterfaceStability.Unstable -public interface TimelineReader { - - /** - * Possible fields to retrieve for {@link #getEntities} and {@link #getEntity} - * . - */ - enum Field { - EVENTS, - RELATED_ENTITIES, - PRIMARY_FILTERS, - OTHER_INFO, - LAST_EVENT_ONLY - } - - /** - * Default limit for {@link #getEntities} and {@link #getEntityTimelines}. - */ - final long DEFAULT_LIMIT = 100; - - /** - * This method retrieves a list of entity information, {@link TimelineEntity}, - * sorted by the starting timestamp for the entity, descending. The starting - * timestamp of an entity is a timestamp specified by the client. If it is not - * explicitly specified, it will be chosen by the store to be the earliest - * timestamp of the events received in the first put for the entity. - * - * @param entityType - * The type of entities to return (required). - * @param limit - * A limit on the number of entities to return. If null, defaults to - * {@link #DEFAULT_LIMIT}. - * @param windowStart - * The earliest start timestamp to retrieve (exclusive). If null, - * defaults to retrieving all entities until the limit is reached. - * @param windowEnd - * The latest start timestamp to retrieve (inclusive). If null, - * defaults to {@link Long#MAX_VALUE} - * @param fromId - * If fromId is not null, retrieve entities earlier than and - * including the specified ID. If no start time is found for the - * specified ID, an empty list of entities will be returned. The - * windowEnd parameter will take precedence if the start time of this - * entity falls later than windowEnd. - * @param fromTs - * If fromTs is not null, ignore entities that were inserted into the - * store after the given timestamp. The entity's insert timestamp - * used for this comparison is the store's system time when the first - * put for the entity was received (not the entity's start time). - * @param primaryFilter - * Retrieves only entities that have the specified primary filter. If - * null, retrieves all entities. This is an indexed retrieval, and no - * entities that do not match the filter are scanned. - * @param secondaryFilters - * Retrieves only entities that have exact matches for all the - * specified filters in their primary filters or other info. This is - * not an indexed retrieval, so all entities are scanned but only - * those matching the filters are returned. - * @param fieldsToRetrieve - * Specifies which fields of the entity object to retrieve (see - * {@link Field}). If the set of fields contains - * {@link Field#LAST_EVENT_ONLY} and not {@link Field#EVENTS}, the - * most recent event for each entity is retrieved. If null, retrieves - * all fields. - * @return An {@link TimelineEntities} object. - * @throws IOException - */ - TimelineEntities getEntities(String entityType, - Long limit, Long windowStart, Long windowEnd, String fromId, Long fromTs, - NameValuePair primaryFilter, Collection secondaryFilters, - EnumSet fieldsToRetrieve) throws IOException; - - /** - * This method retrieves the entity information for a given entity. - * - * @param entityId - * The entity whose information will be retrieved. - * @param entityType - * The type of the entity. - * @param fieldsToRetrieve - * Specifies which fields of the entity object to retrieve (see - * {@link Field}). If the set of fields contains - * {@link Field#LAST_EVENT_ONLY} and not {@link Field#EVENTS}, the - * most recent event for each entity is retrieved. If null, retrieves - * all fields. - * @return An {@link TimelineEntity} object. 
- * @throws IOException - */ - TimelineEntity getEntity(String entityId, String entityType, EnumSet - fieldsToRetrieve) throws IOException; - - /** - * This method retrieves the events for a list of entities all of the same - * entity type. The events for each entity are sorted in order of their - * timestamps, descending. - * - * @param entityType - * The type of entities to retrieve events for. - * @param entityIds - * The entity IDs to retrieve events for. - * @param limit - * A limit on the number of events to return for each entity. If - * null, defaults to {@link #DEFAULT_LIMIT} events per entity. - * @param windowStart - * If not null, retrieves only events later than the given time - * (exclusive) - * @param windowEnd - * If not null, retrieves only events earlier than the given time - * (inclusive) - * @param eventTypes - * Restricts the events returned to the given types. If null, events - * of all types will be returned. - * @return An {@link TimelineEvents} object. - * @throws IOException - */ - TimelineEvents getEntityTimelines(String entityType, - SortedSet entityIds, Long limit, Long windowStart, - Long windowEnd, Set eventTypes) throws IOException; -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TimelineStore.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TimelineStore.java deleted file mode 100644 index fc02873b..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TimelineStore.java +++ /dev/null @@ -1,42 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline; - -import org.apache.hadoop.classification.InterfaceAudience.Private; -import org.apache.hadoop.classification.InterfaceStability.Unstable; -import org.apache.hadoop.service.Service; -import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity; - -@Private -@Unstable -public interface TimelineStore extends - Service, TimelineReader, TimelineWriter { - - /** - * The system filter which will be automatically added to a - * {@link TimelineEntity}'s primary filter section when storing the entity. - * The filter key is case sensitive. Users are supposed not to use the key - * reserved by the timeline system. 
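
As a concrete reading of the getEntities() contract documented above, the sketch below asks a store for at most ten entities of one type carrying a given primary filter, retrieving only their primary filters and other info (entity type, filter name and value are made up):

import java.util.EnumSet;

import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities;
import org.apache.hadoop.yarn.server.timeline.NameValuePair;
import org.apache.hadoop.yarn.server.timeline.TimelineReader;
import org.apache.hadoop.yarn.server.timeline.TimelineReader.Field;

public class GetEntitiesSketch {
  static TimelineEntities latestForUser(TimelineReader reader) throws Exception {
    return reader.getEntities(
        "TEST_APPLICATION",                     // entityType (made up)
        10L,                                    // limit
        null, null,                             // windowStart / windowEnd: no window
        null, null,                             // fromId / fromTs: start from the newest
        new NameValuePair("user", "test_user"), // indexed primary-filter lookup (made up)
        null,                                   // no secondary filters
        EnumSet.of(Field.PRIMARY_FILTERS, Field.OTHER_INFO));
  }
}
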
- */ - @Private - enum SystemFilter { - ENTITY_OWNER - } - -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TimelineWriter.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TimelineWriter.java deleted file mode 100644 index 8f28d82..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TimelineWriter.java +++ /dev/null @@ -1,46 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline; - -import org.apache.hadoop.classification.InterfaceAudience; -import org.apache.hadoop.classification.InterfaceStability; -import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities; -import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse; - -import java.io.IOException; - -/** - * This interface is for storing timeline information. - */ -@InterfaceAudience.Private -@InterfaceStability.Unstable -public interface TimelineWriter { - - /** - * Stores entity information to the timeline store. Any errors occurring for - * individual put request objects will be reported in the response. - * - * @param data - * An {@link TimelineEntities} object. - * @return An {@link TimelinePutResponse} object. - * @throws IOException - */ - TimelinePutResponse put(TimelineEntities data) throws IOException; - -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/package-info.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/package-info.java deleted file mode 100644 index 970e868..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/package-info.java +++ /dev/null @@ -1,20 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -@InterfaceAudience.Private -package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline; -import org.apache.hadoop.classification.InterfaceAudience; diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/security/TimelineACLsManager.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/security/TimelineACLsManager.java deleted file mode 100644 index 8009b39..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/security/TimelineACLsManager.java +++ /dev/null @@ -1,92 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.security; - -import java.io.IOException; -import java.util.Set; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.classification.InterfaceAudience.Private; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.security.UserGroupInformation; -import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity; -import org.apache.hadoop.yarn.exceptions.YarnException; -import org.apache.hadoop.yarn.security.AdminACLsManager; -import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.EntityIdentifier; -import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TimelineStore.SystemFilter; - -import com.google.common.annotations.VisibleForTesting; - -/** - * TimelineACLsManager check the entity level timeline data access. 
- */ -@Private -public class TimelineACLsManager { - - private static final Log LOG = LogFactory.getLog(TimelineACLsManager.class); - - private AdminACLsManager adminAclsManager; - - public TimelineACLsManager(Configuration conf) { - this.adminAclsManager = new AdminACLsManager(conf); - } - - public boolean checkAccess(UserGroupInformation callerUGI, - TimelineEntity entity) throws YarnException, IOException { - if (LOG.isDebugEnabled()) { - LOG.debug("Verifying the access of " + callerUGI.getShortUserName() - + " on the timeline entity " - + new EntityIdentifier(entity.getEntityId(), entity.getEntityType())); - } - - if (!adminAclsManager.areACLsEnabled()) { - return true; - } - - Set values = - entity.getPrimaryFilters().get( - SystemFilter.ENTITY_OWNER.toString()); - if (values == null || values.size() != 1) { - throw new YarnException("Owner information of the timeline entity " - + new EntityIdentifier(entity.getEntityId(), entity.getEntityType()) - + " is corrupted."); - } - String owner = values.iterator().next().toString(); - // TODO: Currently we just check the user is the admin or the timeline - // entity owner. In the future, we need to check whether the user is in the - // allowed user/group list - if (callerUGI != null - && (adminAclsManager.isAdmin(callerUGI) || - callerUGI.getShortUserName().equals(owner))) { - return true; - } - return false; - } - - @Private - @VisibleForTesting - public AdminACLsManager - setAdminACLsManager(AdminACLsManager adminAclsManager) { - AdminACLsManager oldAdminACLsManager = this.adminAclsManager; - this.adminAclsManager = adminAclsManager; - return oldAdminACLsManager; - } - -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/security/TimelineAuthenticationFilter.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/security/TimelineAuthenticationFilter.java deleted file mode 100644 index 53ef1ed..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/security/TimelineAuthenticationFilter.java +++ /dev/null @@ -1,48 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
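
TimelineACLsManager above expects that, when ACLs are enabled, every stored entity carries exactly one ENTITY_OWNER system filter. A hedged round-trip sketch (user and entity names are made up; in the real server the owner filter is added at put time by the web layer, not by clients):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
import org.apache.hadoop.yarn.server.timeline.TimelineStore.SystemFilter;
import org.apache.hadoop.yarn.server.timeline.security.TimelineACLsManager;

public class AclsSketch {
  static boolean ownerCanRead() throws Exception {
    TimelineEntity entity = new TimelineEntity();
    entity.setEntityType("TEST_TYPE"); // made up
    entity.setEntityId("entity_1");    // made up
    // The system-reserved owner filter; exactly one value is expected.
    entity.addPrimaryFilter(SystemFilter.ENTITY_OWNER.toString(), "alice");

    TimelineACLsManager acls = new TimelineACLsManager(new Configuration());
    UserGroupInformation alice = UserGroupInformation.createRemoteUser("alice");
    // true when ACLs are disabled, or when the caller is the owner or an admin
    return acls.checkAccess(alice, entity);
  }
}
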
- */ - -package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.security; - -import java.util.Properties; - -import javax.servlet.FilterConfig; -import javax.servlet.ServletException; - -import org.apache.hadoop.classification.InterfaceAudience.Private; -import org.apache.hadoop.classification.InterfaceStability.Unstable; -import org.apache.hadoop.security.authentication.server.AuthenticationFilter; - -@Private -@Unstable -public class TimelineAuthenticationFilter extends AuthenticationFilter { - - @Override - protected Properties getConfiguration(String configPrefix, - FilterConfig filterConfig) throws ServletException { - // In yarn-site.xml, we can simply set type to "kerberos". However, we need - // to replace the name here to use the customized Kerberos + DT service - // instead of the standard Kerberos handler. - Properties properties = super.getConfiguration(configPrefix, filterConfig); - if (properties.getProperty(AUTH_TYPE).equals("kerberos")) { - properties.setProperty( - AUTH_TYPE, TimelineClientAuthenticationService.class.getName()); - } - return properties; - } - -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/security/TimelineAuthenticationFilterInitializer.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/security/TimelineAuthenticationFilterInitializer.java deleted file mode 100644 index e3c3032..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/security/TimelineAuthenticationFilterInitializer.java +++ /dev/null @@ -1,127 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.security; - -import java.io.FileReader; -import java.io.IOException; -import java.io.Reader; -import java.util.HashMap; -import java.util.Map; - -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.http.FilterContainer; -import org.apache.hadoop.http.FilterInitializer; -import org.apache.hadoop.http.HttpServer2; -import org.apache.hadoop.security.SecurityUtil; - -/** - *

- * Initializes {@link TimelineAuthenticationFilter} which provides support for - * Kerberos HTTP SPNEGO authentication. - *

- *

- * It enables Kerberos HTTP SPNEGO plus delegation token authentication for the - * timeline server. - *

- * Refer to the core-default.xml file, after the comment 'HTTP - * Authentication' for details on the configuration options. All related - * configuration properties have 'hadoop.http.authentication.' as prefix. - */ -public class TimelineAuthenticationFilterInitializer extends FilterInitializer { - - /** - * The configuration prefix of timeline Kerberos + DT authentication - */ - public static final String PREFIX = "yarn.timeline-service.http.authentication."; - - private static final String SIGNATURE_SECRET_FILE = - TimelineAuthenticationFilter.SIGNATURE_SECRET + ".file"; - - /** - *

- * Initializes {@link TimelineAuthenticationFilter} - *

- *

- * Propagates to {@link TimelineAuthenticationFilter} configuration all YARN - * configuration properties prefixed with - * "yarn.timeline-service.http.authentication." - *

- * - * @param container - * The filter container - * @param conf - * Configuration for run-time parameters - */ - @Override - public void initFilter(FilterContainer container, Configuration conf) { - Map filterConfig = new HashMap(); - - // setting the cookie path to root '/' so it is used for all resources. - filterConfig.put(TimelineAuthenticationFilter.COOKIE_PATH, "/"); - - for (Map.Entry entry : conf) { - String name = entry.getKey(); - if (name.startsWith(PREFIX)) { - String value = conf.get(name); - name = name.substring(PREFIX.length()); - filterConfig.put(name, value); - } - } - - String signatureSecretFile = filterConfig.get(SIGNATURE_SECRET_FILE); - if (signatureSecretFile != null) { - try { - StringBuilder secret = new StringBuilder(); - Reader reader = new FileReader(signatureSecretFile); - int c = reader.read(); - while (c > -1) { - secret.append((char) c); - c = reader.read(); - } - reader.close(); - filterConfig - .put(TimelineAuthenticationFilter.SIGNATURE_SECRET, - secret.toString()); - } catch (IOException ex) { - throw new RuntimeException( - "Could not read HTTP signature secret file: " - + signatureSecretFile); - } - } - - // Resolve _HOST into bind address - String bindAddress = conf.get(HttpServer2.BIND_ADDRESS); - String principal = - filterConfig.get(TimelineClientAuthenticationService.PRINCIPAL); - if (principal != null) { - try { - principal = SecurityUtil.getServerPrincipal(principal, bindAddress); - } catch (IOException ex) { - throw new RuntimeException( - "Could not resolve Kerberos principal name: " + ex.toString(), ex); - } - filterConfig.put(TimelineClientAuthenticationService.PRINCIPAL, - principal); - } - - container.addGlobalFilter("Timeline Authentication Filter", - TimelineAuthenticationFilter.class.getName(), - filterConfig); - } -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/security/TimelineClientAuthenticationService.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/security/TimelineClientAuthenticationService.java deleted file mode 100644 index f11633d..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/security/TimelineClientAuthenticationService.java +++ /dev/null @@ -1,236 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
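
Operationally, the initializer above only forwards properties carrying the yarn.timeline-service.http.authentication. prefix (stripping the prefix) into the filter, where TimelineAuthenticationFilter.getConfiguration() swaps the plain "kerberos" type for the customized Kerberos + delegation-token handler. A hedged sketch of the equivalent programmatic configuration (principal realm, keytab and secret-file paths are placeholders):

import org.apache.hadoop.conf.Configuration;

public class TimelineAuthConfSketch {
  static Configuration secureTimelineConf() {
    Configuration conf = new Configuration();
    String prefix = "yarn.timeline-service.http.authentication.";
    // "kerberos" is rewritten by the filter to the Kerberos + DT handler shown above
    conf.set(prefix + "type", "kerberos");
    conf.set(prefix + "kerberos.principal", "HTTP/_HOST@EXAMPLE.COM");              // placeholder realm; _HOST is resolved by initFilter()
    conf.set(prefix + "kerberos.keytab", "/etc/security/keytabs/timeline.keytab");  // placeholder path
    // Optional: file whose contents sign the auth cookie (SIGNATURE_SECRET + ".file")
    conf.set(prefix + "signature.secret.file", "/etc/security/timeline-secret");    // placeholder path
    return conf;
  }
}
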
- */ - -package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.security; - -import java.io.IOException; -import java.io.Writer; -import java.text.MessageFormat; -import java.util.HashSet; -import java.util.Set; - -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; -import javax.ws.rs.core.MediaType; - -import org.apache.hadoop.classification.InterfaceAudience.Private; -import org.apache.hadoop.classification.InterfaceStability.Unstable; -import org.apache.hadoop.security.UserGroupInformation; -import org.apache.hadoop.security.authentication.client.AuthenticationException; -import org.apache.hadoop.security.authentication.server.AuthenticationToken; -import org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler; -import org.apache.hadoop.security.token.Token; -import org.apache.hadoop.yarn.api.records.timeline.TimelineDelegationTokenResponse; -import org.apache.hadoop.yarn.security.client.TimelineDelegationTokenIdentifier; -import org.apache.hadoop.yarn.security.client.TimelineDelegationTokenOperation; -import org.apache.hadoop.yarn.security.client.TimelineAuthenticationConsts; -import org.apache.hadoop.yarn.server.applicationhistoryservice.webapp.AHSWebApp; -import org.apache.hadoop.yarn.webapp.YarnJacksonJaxbJsonProvider; -import org.codehaus.jackson.map.ObjectMapper; - -/** - * Server side AuthenticationHandler that authenticates requests - * using the incoming delegation token as a 'delegation' query string parameter. - *

- * If not delegation token is present in the request it delegates to the - * {@link KerberosAuthenticationHandler} - */ -@Private -@Unstable -public class TimelineClientAuthenticationService - extends KerberosAuthenticationHandler { - - public static final String TYPE = "kerberos-dt"; - private static final Set DELEGATION_TOKEN_OPS = new HashSet(); - private static final String OP_PARAM = "op"; - private static final String ENTER = System.getProperty("line.separator"); - - private ObjectMapper mapper; - - static { - DELEGATION_TOKEN_OPS.add( - TimelineDelegationTokenOperation.GETDELEGATIONTOKEN.toString()); - DELEGATION_TOKEN_OPS.add( - TimelineDelegationTokenOperation.RENEWDELEGATIONTOKEN.toString()); - DELEGATION_TOKEN_OPS.add( - TimelineDelegationTokenOperation.CANCELDELEGATIONTOKEN.toString()); - } - - public TimelineClientAuthenticationService() { - super(); - mapper = new ObjectMapper(); - YarnJacksonJaxbJsonProvider.configObjectMapper(mapper); - } - - /** - * Returns authentication type of the handler. - * - * @return delegationtoken-kerberos - */ - @Override - public String getType() { - return TYPE; - } - - @Override - public boolean managementOperation(AuthenticationToken token, - HttpServletRequest request, HttpServletResponse response) - throws IOException, AuthenticationException { - boolean requestContinues = true; - String op = request.getParameter(OP_PARAM); - op = (op != null) ? op.toUpperCase() : null; - if (DELEGATION_TOKEN_OPS.contains(op) && - !request.getMethod().equals("OPTIONS")) { - TimelineDelegationTokenOperation dtOp = - TimelineDelegationTokenOperation.valueOf(op); - if (dtOp.getHttpMethod().equals(request.getMethod())) { - if (dtOp.requiresKerberosCredentials() && token == null) { - response.sendError(HttpServletResponse.SC_UNAUTHORIZED, - MessageFormat.format( - "Operation [{0}] requires SPNEGO authentication established", - dtOp)); - requestContinues = false; - } else { - TimelineDelegationTokenSecretManagerService secretManager = - AHSWebApp.getInstance() - .getTimelineDelegationTokenSecretManagerService(); - try { - TimelineDelegationTokenResponse res = null; - switch (dtOp) { - case GETDELEGATIONTOKEN: - UserGroupInformation ownerUGI = - UserGroupInformation.createRemoteUser(token.getUserName()); - String renewerParam = - request - .getParameter(TimelineAuthenticationConsts.RENEWER_PARAM); - if (renewerParam == null) { - renewerParam = token.getUserName(); - } - Token dToken = - secretManager.createToken(ownerUGI, renewerParam); - res = new TimelineDelegationTokenResponse(); - res.setType(TimelineAuthenticationConsts.DELEGATION_TOKEN_URL); - res.setContent(dToken.encodeToUrlString()); - break; - case RENEWDELEGATIONTOKEN: - case CANCELDELEGATIONTOKEN: - String tokenParam = - request - .getParameter(TimelineAuthenticationConsts.TOKEN_PARAM); - if (tokenParam == null) { - response.sendError(HttpServletResponse.SC_BAD_REQUEST, - MessageFormat - .format( - "Operation [{0}] requires the parameter [{1}]", - dtOp, - TimelineAuthenticationConsts.TOKEN_PARAM)); - requestContinues = false; - } else { - if (dtOp == TimelineDelegationTokenOperation.CANCELDELEGATIONTOKEN) { - Token dt = - new Token(); - dt.decodeFromUrlString(tokenParam); - secretManager.cancelToken(dt, token.getUserName()); - } else { - Token dt = - new Token(); - dt.decodeFromUrlString(tokenParam); - long expirationTime = - secretManager.renewToken(dt, token.getUserName()); - res = new TimelineDelegationTokenResponse(); - res.setType(TimelineAuthenticationConsts.DELEGATION_TOKEN_EXPIRATION_TIME); - 
res.setContent(expirationTime); - } - } - break; - } - if (requestContinues) { - response.setStatus(HttpServletResponse.SC_OK); - if (res != null) { - response.setContentType(MediaType.APPLICATION_JSON); - Writer writer = response.getWriter(); - mapper.writeValue(writer, res); - writer.write(ENTER); - writer.flush(); - - } - requestContinues = false; - } - } catch (IOException e) { - throw new AuthenticationException(e.toString(), e); - } - } - } else { - response - .sendError( - HttpServletResponse.SC_BAD_REQUEST, - MessageFormat - .format( - "Wrong HTTP method [{0}] for operation [{1}], it should be [{2}]", - request.getMethod(), dtOp, dtOp.getHttpMethod())); - requestContinues = false; - } - } - return requestContinues; - } - - /** - * Authenticates a request looking for the delegation - * query-string parameter and verifying it is a valid token. If there is not - * delegation query-string parameter, it delegates the - * authentication to the {@link KerberosAuthenticationHandler} unless it is - * disabled. - * - * @param request - * the HTTP client request. - * @param response - * the HTTP client response. - * - * @return the authentication token for the authenticated request. - * @throws IOException - * thrown if an IO error occurred. - * @throws AuthenticationException - * thrown if the authentication failed. - */ - @Override - public AuthenticationToken authenticate(HttpServletRequest request, - HttpServletResponse response) - throws IOException, AuthenticationException { - AuthenticationToken token; - String delegationParam = - request - .getParameter(TimelineAuthenticationConsts.DELEGATION_PARAM); - if (delegationParam != null) { - Token dt = - new Token(); - dt.decodeFromUrlString(delegationParam); - TimelineDelegationTokenSecretManagerService secretManager = - AHSWebApp.getInstance() - .getTimelineDelegationTokenSecretManagerService(); - UserGroupInformation ugi = secretManager.verifyToken(dt); - final String shortName = ugi.getShortUserName(); - // creating a ephemeral token - token = new AuthenticationToken(shortName, ugi.getUserName(), getType()); - token.setExpires(0); - } else { - token = super.authenticate(request, response); - } - return token; - } - -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/security/TimelineDelegationTokenSecretManagerService.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/security/TimelineDelegationTokenSecretManagerService.java deleted file mode 100644 index 2808dac..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/security/TimelineDelegationTokenSecretManagerService.java +++ /dev/null @@ -1,180 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.security; - -import java.io.ByteArrayInputStream; -import java.io.DataInputStream; -import java.io.IOException; -import java.net.InetSocketAddress; - -import org.apache.hadoop.classification.InterfaceAudience.Private; -import org.apache.hadoop.classification.InterfaceStability.Unstable; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.io.Text; -import org.apache.hadoop.security.SecurityUtil; -import org.apache.hadoop.security.UserGroupInformation; -import org.apache.hadoop.security.token.Token; -import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecretManager; -import org.apache.hadoop.service.AbstractService; -import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.security.client.TimelineDelegationTokenIdentifier; -import org.apache.hadoop.yarn.util.timeline.TimelineUtils; - -/** - * The service wrapper of {@link TimelineDelegationTokenSecretManager} - */ -@Private -@Unstable -public class TimelineDelegationTokenSecretManagerService extends AbstractService { - - private TimelineDelegationTokenSecretManager secretManager = null; - private InetSocketAddress serviceAddr = null; - - public TimelineDelegationTokenSecretManagerService() { - super(TimelineDelegationTokenSecretManagerService.class.getName()); - } - - @Override - protected void serviceInit(Configuration conf) throws Exception { - long secretKeyInterval = - conf.getLong(YarnConfiguration.DELEGATION_KEY_UPDATE_INTERVAL_KEY, - YarnConfiguration.DELEGATION_KEY_UPDATE_INTERVAL_DEFAULT); - long tokenMaxLifetime = - conf.getLong(YarnConfiguration.DELEGATION_TOKEN_MAX_LIFETIME_KEY, - YarnConfiguration.DELEGATION_TOKEN_MAX_LIFETIME_DEFAULT); - long tokenRenewInterval = - conf.getLong(YarnConfiguration.DELEGATION_TOKEN_RENEW_INTERVAL_KEY, - YarnConfiguration.DELEGATION_TOKEN_RENEW_INTERVAL_DEFAULT); - secretManager = new TimelineDelegationTokenSecretManager(secretKeyInterval, - tokenMaxLifetime, tokenRenewInterval, - 3600000); - secretManager.startThreads(); - - serviceAddr = TimelineUtils.getTimelineTokenServiceAddress(getConfig()); - super.init(conf); - } - - @Override - protected void serviceStop() throws Exception { - secretManager.stopThreads(); - super.stop(); - } - - /** - * Creates a delegation token. - * - * @param ugi UGI creating the token. - * @param renewer token renewer. - * @return new delegation token. - * @throws IOException thrown if the token could not be created. - */ - public Token createToken( - UserGroupInformation ugi, String renewer) throws IOException { - renewer = (renewer == null) ? 
ugi.getShortUserName() : renewer; - String user = ugi.getUserName(); - Text owner = new Text(user); - Text realUser = null; - if (ugi.getRealUser() != null) { - realUser = new Text(ugi.getRealUser().getUserName()); - } - TimelineDelegationTokenIdentifier tokenIdentifier = - new TimelineDelegationTokenIdentifier(owner, new Text(renewer), realUser); - Token token = - new Token(tokenIdentifier, secretManager); - SecurityUtil.setTokenService(token, serviceAddr); - return token; - } - - /** - * Renews a delegation token. - * - * @param token delegation token to renew. - * @param renewer token renewer. - * @throws IOException thrown if the token could not be renewed. - */ - public long renewToken(Token token, - String renewer) throws IOException { - return secretManager.renewToken(token, renewer); - } - - /** - * Cancels a delegation token. - * - * @param token delegation token to cancel. - * @param canceler token canceler. - * @throws IOException thrown if the token could not be canceled. - */ - public void cancelToken(Token token, - String canceler) throws IOException { - secretManager.cancelToken(token, canceler); - } - - /** - * Verifies a delegation token. - * - * @param token delegation token to verify. - * @return the UGI for the token. - * @throws IOException thrown if the token could not be verified. - */ - public UserGroupInformation verifyToken(Token token) - throws IOException { - ByteArrayInputStream buf = new ByteArrayInputStream(token.getIdentifier()); - DataInputStream dis = new DataInputStream(buf); - TimelineDelegationTokenIdentifier id = new TimelineDelegationTokenIdentifier(); - try { - id.readFields(dis); - secretManager.verifyToken(id, token.getPassword()); - } finally { - dis.close(); - } - return id.getUser(); - } - - /** - * Create a timeline secret manager - * - * @param delegationKeyUpdateInterval - * the number of seconds for rolling new secret keys. 
- * @param delegationTokenMaxLifetime - * the maximum lifetime of the delegation tokens - * @param delegationTokenRenewInterval - * how often the tokens must be renewed - * @param delegationTokenRemoverScanInterval - * how often the tokens are scanned for expired tokens - */ - @Private - @Unstable - public static class TimelineDelegationTokenSecretManager extends - AbstractDelegationTokenSecretManager { - - public TimelineDelegationTokenSecretManager(long delegationKeyUpdateInterval, - long delegationTokenMaxLifetime, long delegationTokenRenewInterval, - long delegationTokenRemoverScanInterval) { - super(delegationKeyUpdateInterval, delegationTokenMaxLifetime, - delegationTokenRenewInterval, delegationTokenRemoverScanInterval); - } - - @Override - public TimelineDelegationTokenIdentifier createIdentifier() { - return new TimelineDelegationTokenIdentifier(); - } - - } - -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSWebApp.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSWebApp.java index 17b1e62..9901eeb 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSWebApp.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSWebApp.java @@ -22,9 +22,10 @@ import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.yarn.server.api.ApplicationContext; import org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryManager; -import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TimelineStore; -import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.security.TimelineACLsManager; -import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.security.TimelineDelegationTokenSecretManagerService; +import org.apache.hadoop.yarn.server.timeline.TimelineStore; +import org.apache.hadoop.yarn.server.timeline.security.TimelineACLsManager; +import org.apache.hadoop.yarn.server.timeline.security.TimelineDelegationTokenSecretManagerService; +import org.apache.hadoop.yarn.server.timeline.webapp.TimelineWebServices; import org.apache.hadoop.yarn.webapp.GenericExceptionHandler; import org.apache.hadoop.yarn.webapp.WebApp; import org.apache.hadoop.yarn.webapp.YarnJacksonJaxbJsonProvider; diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TimelineWebServices.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TimelineWebServices.java deleted file mode 100644 index 5d749fa..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TimelineWebServices.java +++ /dev/null @@ -1,539 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
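
A hedged sketch of the token lifecycle that TimelineDelegationTokenSecretManagerService exposes, assuming the service is simply init()/start()ed with a default YarnConfiguration; the user and renewer names are made up:

import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.security.client.TimelineDelegationTokenIdentifier;
import org.apache.hadoop.yarn.server.timeline.security.TimelineDelegationTokenSecretManagerService;

public class TimelineTokenSketch {
  static void tokenLifecycle() throws Exception {
    TimelineDelegationTokenSecretManagerService service =
        new TimelineDelegationTokenSecretManagerService();
    service.init(new YarnConfiguration());
    service.start();

    UserGroupInformation alice = UserGroupInformation.createRemoteUser("alice"); // made-up owner
    Token<TimelineDelegationTokenIdentifier> token =
        service.createToken(alice, "rm-renewer");             // made-up renewer

    long newExpiry = service.renewToken(token, "rm-renewer"); // renewer extends the lifetime
    UserGroupInformation owner = service.verifyToken(token);  // resolves back to "alice"
    System.out.println(owner.getShortUserName() + " valid until " + newExpiry);
    service.cancelToken(token, "alice");                      // owner (or renewer) cancels

    service.stop();
  }
}
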
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.yarn.server.applicationhistoryservice.webapp; - -import static org.apache.hadoop.yarn.util.StringHelper.CSV_JOINER; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collection; -import java.util.EnumSet; -import java.util.HashSet; -import java.util.Iterator; -import java.util.List; -import java.util.Set; -import java.util.SortedSet; -import java.util.TreeSet; - -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; -import javax.ws.rs.Consumes; -import javax.ws.rs.GET; -import javax.ws.rs.POST; -import javax.ws.rs.Path; -import javax.ws.rs.PathParam; -import javax.ws.rs.Produces; -import javax.ws.rs.QueryParam; -import javax.ws.rs.WebApplicationException; -import javax.ws.rs.core.Context; -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.Response; -import javax.xml.bind.annotation.XmlAccessType; -import javax.xml.bind.annotation.XmlAccessorType; -import javax.xml.bind.annotation.XmlElement; -import javax.xml.bind.annotation.XmlRootElement; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.classification.InterfaceAudience.Public; -import org.apache.hadoop.classification.InterfaceStability.Unstable; -import org.apache.hadoop.security.UserGroupInformation; -import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities; -import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity; -import org.apache.hadoop.yarn.api.records.timeline.TimelineEvents; -import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse; -import org.apache.hadoop.yarn.exceptions.YarnException; -import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.EntityIdentifier; -import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.GenericObjectMapper; -import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.NameValuePair; -import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TimelineReader.Field; -import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TimelineStore; -import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.security.TimelineACLsManager; -import org.apache.hadoop.yarn.util.timeline.TimelineUtils; -import org.apache.hadoop.yarn.webapp.BadRequestException; -import org.apache.hadoop.yarn.webapp.NotFoundException; - -import com.google.inject.Inject; -import com.google.inject.Singleton; - -@Singleton -@Path("/ws/v1/timeline") -//TODO: support XML serialization/deserialization -public class TimelineWebServices { - - private static final Log LOG = LogFactory.getLog(TimelineWebServices.class); - - private TimelineStore store; - private TimelineACLsManager timelineACLsManager; - - @Inject - public TimelineWebServices(TimelineStore store, - 
TimelineACLsManager timelineACLsManager) { - this.store = store; - this.timelineACLsManager = timelineACLsManager; - } - - @XmlRootElement(name = "about") - @XmlAccessorType(XmlAccessType.NONE) - @Public - @Unstable - public static class AboutInfo { - - private String about; - - public AboutInfo() { - - } - - public AboutInfo(String about) { - this.about = about; - } - - @XmlElement(name = "About") - public String getAbout() { - return about; - } - - public void setAbout(String about) { - this.about = about; - } - - } - - /** - * Return the description of the timeline web services. - */ - @GET - @Produces({ MediaType.APPLICATION_JSON /* , MediaType.APPLICATION_XML */}) - public AboutInfo about( - @Context HttpServletRequest req, - @Context HttpServletResponse res) { - init(res); - return new AboutInfo("Timeline API"); - } - - /** - * Return a list of entities that match the given parameters. - */ - @GET - @Path("/{entityType}") - @Produces({ MediaType.APPLICATION_JSON /* , MediaType.APPLICATION_XML */}) - public TimelineEntities getEntities( - @Context HttpServletRequest req, - @Context HttpServletResponse res, - @PathParam("entityType") String entityType, - @QueryParam("primaryFilter") String primaryFilter, - @QueryParam("secondaryFilter") String secondaryFilter, - @QueryParam("windowStart") String windowStart, - @QueryParam("windowEnd") String windowEnd, - @QueryParam("fromId") String fromId, - @QueryParam("fromTs") String fromTs, - @QueryParam("limit") String limit, - @QueryParam("fields") String fields) { - init(res); - TimelineEntities entities = null; - try { - EnumSet fieldEnums = parseFieldsStr(fields, ","); - boolean modified = extendFields(fieldEnums); - UserGroupInformation callerUGI = getUser(req); - entities = store.getEntities( - parseStr(entityType), - parseLongStr(limit), - parseLongStr(windowStart), - parseLongStr(windowEnd), - parseStr(fromId), - parseLongStr(fromTs), - parsePairStr(primaryFilter, ":"), - parsePairsStr(secondaryFilter, ",", ":"), - fieldEnums); - if (entities != null) { - Iterator entitiesItr = - entities.getEntities().iterator(); - while (entitiesItr.hasNext()) { - TimelineEntity entity = entitiesItr.next(); - try { - // check ACLs - if (!timelineACLsManager.checkAccess(callerUGI, entity)) { - entitiesItr.remove(); - } else { - // clean up system data - if (modified) { - entity.setPrimaryFilters(null); - } else { - cleanupOwnerInfo(entity); - } - } - } catch (YarnException e) { - LOG.error("Error when verifying access for user " + callerUGI - + " on the events of the timeline entity " - + new EntityIdentifier(entity.getEntityId(), - entity.getEntityType()), e); - entitiesItr.remove(); - } - } - } - } catch (NumberFormatException e) { - throw new BadRequestException( - "windowStart, windowEnd or limit is not a numeric value."); - } catch (IllegalArgumentException e) { - throw new BadRequestException("requested invalid field."); - } catch (IOException e) { - LOG.error("Error getting entities", e); - throw new WebApplicationException(e, - Response.Status.INTERNAL_SERVER_ERROR); - } - if (entities == null) { - return new TimelineEntities(); - } - return entities; - } - - /** - * Return a single entity of the given entity type and Id. 
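[Illustration, not part of the patch] The getEntities handler above is reached at GET /ws/v1/timeline/{entityType}; the limit, fields, and primaryFilter query strings map onto the @QueryParam arguments shown and are parsed by the helper methods later in this class. A minimal client sketch, assuming a timeline server on the default web port 8188 and hypothetical entity type and filter values:

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;

public class TimelineQuerySketch {
  public static void main(String[] args) throws Exception {
    // Path and query parameters mirror the @Path/@QueryParam annotations in
    // TimelineWebServices; host, port, entity type and filter are hypothetical.
    URL url = new URL("http://localhost:8188/ws/v1/timeline/MY_ENTITY_TYPE"
        + "?limit=10&fields=EVENTS,PRIMARYFILTERS&primaryFilter=user:alice");
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setRequestProperty("Accept", "application/json");
    try (BufferedReader in = new BufferedReader(new InputStreamReader(
        conn.getInputStream(), StandardCharsets.UTF_8))) {
      String line;
      while ((line = in.readLine()) != null) {
        System.out.println(line);   // JSON-serialized TimelineEntities
      }
    } finally {
      conn.disconnect();
    }
  }
}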
- */ - @GET - @Path("/{entityType}/{entityId}") - @Produces({ MediaType.APPLICATION_JSON /* , MediaType.APPLICATION_XML */}) - public TimelineEntity getEntity( - @Context HttpServletRequest req, - @Context HttpServletResponse res, - @PathParam("entityType") String entityType, - @PathParam("entityId") String entityId, - @QueryParam("fields") String fields) { - init(res); - TimelineEntity entity = null; - try { - EnumSet fieldEnums = parseFieldsStr(fields, ","); - boolean modified = extendFields(fieldEnums); - entity = - store.getEntity(parseStr(entityId), parseStr(entityType), - fieldEnums); - if (entity != null) { - // check ACLs - UserGroupInformation callerUGI = getUser(req); - if (!timelineACLsManager.checkAccess(callerUGI, entity)) { - entity = null; - } else { - // clean up the system data - if (modified) { - entity.setPrimaryFilters(null); - } else { - cleanupOwnerInfo(entity); - } - } - } - } catch (IllegalArgumentException e) { - throw new BadRequestException( - "requested invalid field."); - } catch (IOException e) { - LOG.error("Error getting entity", e); - throw new WebApplicationException(e, - Response.Status.INTERNAL_SERVER_ERROR); - } catch (YarnException e) { - LOG.error("Error getting entity", e); - throw new WebApplicationException(e, - Response.Status.INTERNAL_SERVER_ERROR); - } - if (entity == null) { - throw new NotFoundException("Timeline entity " - + new EntityIdentifier(parseStr(entityId), parseStr(entityType)) - + " is not found"); - } - return entity; - } - - /** - * Return the events that match the given parameters. - */ - @GET - @Path("/{entityType}/events") - @Produces({ MediaType.APPLICATION_JSON /* , MediaType.APPLICATION_XML */}) - public TimelineEvents getEvents( - @Context HttpServletRequest req, - @Context HttpServletResponse res, - @PathParam("entityType") String entityType, - @QueryParam("entityId") String entityId, - @QueryParam("eventType") String eventType, - @QueryParam("windowStart") String windowStart, - @QueryParam("windowEnd") String windowEnd, - @QueryParam("limit") String limit) { - init(res); - TimelineEvents events = null; - try { - UserGroupInformation callerUGI = getUser(req); - events = store.getEntityTimelines( - parseStr(entityType), - parseArrayStr(entityId, ","), - parseLongStr(limit), - parseLongStr(windowStart), - parseLongStr(windowEnd), - parseArrayStr(eventType, ",")); - if (events != null) { - Iterator eventsItr = - events.getAllEvents().iterator(); - while (eventsItr.hasNext()) { - TimelineEvents.EventsOfOneEntity eventsOfOneEntity = eventsItr.next(); - try { - TimelineEntity entity = store.getEntity( - eventsOfOneEntity.getEntityId(), - eventsOfOneEntity.getEntityType(), - EnumSet.of(Field.PRIMARY_FILTERS)); - // check ACLs - if (!timelineACLsManager.checkAccess(callerUGI, entity)) { - eventsItr.remove(); - } - } catch (Exception e) { - LOG.error("Error when verifying access for user " + callerUGI - + " on the events of the timeline entity " - + new EntityIdentifier(eventsOfOneEntity.getEntityId(), - eventsOfOneEntity.getEntityType()), e); - eventsItr.remove(); - } - } - } - } catch (NumberFormatException e) { - throw new BadRequestException( - "windowStart, windowEnd or limit is not a numeric value."); - } catch (IOException e) { - LOG.error("Error getting entity timelines", e); - throw new WebApplicationException(e, - Response.Status.INTERNAL_SERVER_ERROR); - } - if (events == null) { - return new TimelineEvents(); - } - return events; - } - - /** - * Store the given entities into the timeline store, and return the errors - * 
that happen during storing. - */ - @POST - @Consumes({ MediaType.APPLICATION_JSON /* , MediaType.APPLICATION_XML */}) - public TimelinePutResponse postEntities( - @Context HttpServletRequest req, - @Context HttpServletResponse res, - TimelineEntities entities) { - init(res); - if (entities == null) { - return new TimelinePutResponse(); - } - UserGroupInformation callerUGI = getUser(req); - try { - List entityIDs = new ArrayList(); - TimelineEntities entitiesToPut = new TimelineEntities(); - List errors = - new ArrayList(); - for (TimelineEntity entity : entities.getEntities()) { - EntityIdentifier entityID = - new EntityIdentifier(entity.getEntityId(), entity.getEntityType()); - - // check if there is existing entity - TimelineEntity existingEntity = null; - try { - existingEntity = - store.getEntity(entityID.getId(), entityID.getType(), - EnumSet.of(Field.PRIMARY_FILTERS)); - if (existingEntity != null - && !timelineACLsManager.checkAccess(callerUGI, existingEntity)) { - throw new YarnException("The timeline entity " + entityID - + " was not put by " + callerUGI + " before"); - } - } catch (Exception e) { - // Skip the entity which already exists and was put by others - LOG.warn("Skip the timeline entity: " + entityID + ", because " - + e.getMessage()); - TimelinePutResponse.TimelinePutError error = - new TimelinePutResponse.TimelinePutError(); - error.setEntityId(entityID.getId()); - error.setEntityType(entityID.getType()); - error.setErrorCode( - TimelinePutResponse.TimelinePutError.ACCESS_DENIED); - errors.add(error); - continue; - } - - // inject owner information for the access check if this is the first - // time to post the entity, in case it's the admin who is updating - // the timeline data. - try { - if (existingEntity == null) { - injectOwnerInfo(entity, - callerUGI == null ? 
"" : callerUGI.getShortUserName()); - } - } catch (YarnException e) { - // Skip the entity which messes up the primary filter and record the - // error - LOG.warn("Skip the timeline entity: " + entityID + ", because " - + e.getMessage()); - TimelinePutResponse.TimelinePutError error = - new TimelinePutResponse.TimelinePutError(); - error.setEntityId(entityID.getId()); - error.setEntityType(entityID.getType()); - error.setErrorCode( - TimelinePutResponse.TimelinePutError.SYSTEM_FILTER_CONFLICT); - errors.add(error); - continue; - } - - entityIDs.add(entityID); - entitiesToPut.addEntity(entity); - if (LOG.isDebugEnabled()) { - LOG.debug("Storing the entity " + entityID + ", JSON-style content: " - + TimelineUtils.dumpTimelineRecordtoJSON(entity)); - } - } - if (LOG.isDebugEnabled()) { - LOG.debug("Storing entities: " + CSV_JOINER.join(entityIDs)); - } - TimelinePutResponse response = store.put(entitiesToPut); - // add the errors of timeline system filter key conflict - response.addErrors(errors); - return response; - } catch (IOException e) { - LOG.error("Error putting entities", e); - throw new WebApplicationException(e, - Response.Status.INTERNAL_SERVER_ERROR); - } - } - - private void init(HttpServletResponse response) { - response.setContentType(null); - } - - private static SortedSet parseArrayStr(String str, String delimiter) { - if (str == null) { - return null; - } - SortedSet strSet = new TreeSet(); - String[] strs = str.split(delimiter); - for (String aStr : strs) { - strSet.add(aStr.trim()); - } - return strSet; - } - - private static NameValuePair parsePairStr(String str, String delimiter) { - if (str == null) { - return null; - } - String[] strs = str.split(delimiter, 2); - try { - return new NameValuePair(strs[0].trim(), - GenericObjectMapper.OBJECT_READER.readValue(strs[1].trim())); - } catch (Exception e) { - // didn't work as an Object, keep it as a String - return new NameValuePair(strs[0].trim(), strs[1].trim()); - } - } - - private static Collection parsePairsStr( - String str, String aDelimiter, String pDelimiter) { - if (str == null) { - return null; - } - String[] strs = str.split(aDelimiter); - Set pairs = new HashSet(); - for (String aStr : strs) { - pairs.add(parsePairStr(aStr, pDelimiter)); - } - return pairs; - } - - private static EnumSet parseFieldsStr(String str, String delimiter) { - if (str == null) { - return null; - } - String[] strs = str.split(delimiter); - List fieldList = new ArrayList(); - for (String s : strs) { - s = s.trim().toUpperCase(); - if (s.equals("EVENTS")) { - fieldList.add(Field.EVENTS); - } else if (s.equals("LASTEVENTONLY")) { - fieldList.add(Field.LAST_EVENT_ONLY); - } else if (s.equals("RELATEDENTITIES")) { - fieldList.add(Field.RELATED_ENTITIES); - } else if (s.equals("PRIMARYFILTERS")) { - fieldList.add(Field.PRIMARY_FILTERS); - } else if (s.equals("OTHERINFO")) { - fieldList.add(Field.OTHER_INFO); - } else { - throw new IllegalArgumentException("Requested nonexistent field " + s); - } - } - if (fieldList.size() == 0) { - return null; - } - Field f1 = fieldList.remove(fieldList.size() - 1); - if (fieldList.size() == 0) { - return EnumSet.of(f1); - } else { - return EnumSet.of(f1, fieldList.toArray(new Field[fieldList.size()])); - } - } - - private static boolean extendFields(EnumSet fieldEnums) { - boolean modified = false; - if (fieldEnums != null && !fieldEnums.contains(Field.PRIMARY_FILTERS)) { - fieldEnums.add(Field.PRIMARY_FILTERS); - modified = true; - } - return modified; - } - private static Long parseLongStr(String str) { - 
return str == null ? null : Long.parseLong(str.trim()); - } - - private static String parseStr(String str) { - return str == null ? null : str.trim(); - } - - private static UserGroupInformation getUser(HttpServletRequest req) { - String remoteUser = req.getRemoteUser(); - UserGroupInformation callerUGI = null; - if (remoteUser != null) { - callerUGI = UserGroupInformation.createRemoteUser(remoteUser); - } - return callerUGI; - } - - private static void injectOwnerInfo(TimelineEntity timelineEntity, - String owner) throws YarnException { - if (timelineEntity.getPrimaryFilters() != null && - timelineEntity.getPrimaryFilters().containsKey( - TimelineStore.SystemFilter.ENTITY_OWNER)) { - throw new YarnException( - "User should not use the timeline system filter key: " - + TimelineStore.SystemFilter.ENTITY_OWNER); - } - timelineEntity.addPrimaryFilter( - TimelineStore.SystemFilter.ENTITY_OWNER - .toString(), owner); - } - - private static void cleanupOwnerInfo(TimelineEntity timelineEntity) { - if (timelineEntity.getPrimaryFilters() != null) { - timelineEntity.getPrimaryFilters().remove( - TimelineStore.SystemFilter.ENTITY_OWNER.toString()); - } - } - -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/EntityIdentifier.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/EntityIdentifier.java new file mode 100644 index 0000000..60a9aa7 --- /dev/null +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/EntityIdentifier.java @@ -0,0 +1,100 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.yarn.server.timeline; + +import org.apache.hadoop.classification.InterfaceAudience.Private; +import org.apache.hadoop.classification.InterfaceStability.Unstable; + +/** + * The unique identifier for an entity + */ +@Private +@Unstable +public class EntityIdentifier implements Comparable { + + private String id; + private String type; + + public EntityIdentifier(String id, String type) { + this.id = id; + this.type = type; + } + + /** + * Get the entity Id. + * @return The entity Id. + */ + public String getId() { + return id; + } + + /** + * Get the entity type. + * @return The entity type. 
+ */ + public String getType() { + return type; + } + + @Override + public int compareTo(EntityIdentifier other) { + int c = type.compareTo(other.type); + if (c != 0) return c; + return id.compareTo(other.id); + } + + @Override + public int hashCode() { + // generated by eclipse + final int prime = 31; + int result = 1; + result = prime * result + ((id == null) ? 0 : id.hashCode()); + result = prime * result + ((type == null) ? 0 : type.hashCode()); + return result; + } + + @Override + public boolean equals(Object obj) { + // generated by eclipse + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + EntityIdentifier other = (EntityIdentifier) obj; + if (id == null) { + if (other.id != null) + return false; + } else if (!id.equals(other.id)) + return false; + if (type == null) { + if (other.type != null) + return false; + } else if (!type.equals(other.type)) + return false; + return true; + } + + @Override + public String toString() { + return "{ id: " + id + ", type: "+ type + " }"; + } + +} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/GenericObjectMapper.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/GenericObjectMapper.java new file mode 100644 index 0000000..d235ff7 --- /dev/null +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/GenericObjectMapper.java @@ -0,0 +1,135 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.yarn.server.timeline; + +import java.io.IOException; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.classification.InterfaceStability; +import org.codehaus.jackson.map.ObjectMapper; +import org.codehaus.jackson.map.ObjectReader; +import org.codehaus.jackson.map.ObjectWriter; + +/** + * A utility class providing methods for serializing and deserializing + * objects. The {@link #write(Object)} and {@link #read(byte[])} methods are + * used by the {@link LeveldbTimelineStore} to store and retrieve arbitrary + * JSON, while the {@link #writeReverseOrderedLong} and {@link + * #readReverseOrderedLong} methods are used to sort entities in descending + * start time order. 
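[Illustration, not part of the patch] The class comment above describes two facilities: a generic JSON round-trip (write/read) and a long encoding whose byte-wise order is the reverse of numeric order, so that leveldb's ascending key iteration yields descending start times. A small sketch exercising both, using only the method signatures declared below; the values are made up:

import org.apache.hadoop.yarn.server.timeline.GenericObjectMapper;

public class GenericObjectMapperSketch {

  public static void main(String[] args) throws Exception {
    // JSON round-trip for the simple types ObjectMapper understands
    // (strings, numbers, maps, lists).
    byte[] bytes = GenericObjectMapper.write("a primary filter value");
    Object back = GenericObjectMapper.read(bytes);
    System.out.println(back);                                     // a primary filter value

    // Reverse-ordered longs: the larger timestamp encodes to the
    // lexicographically *smaller* byte array, so newer entities come
    // first when keys are scanned in ascending order.
    byte[] newer = GenericObjectMapper.writeReverseOrderedLong(2000L);
    byte[] older = GenericObjectMapper.writeReverseOrderedLong(1000L);
    System.out.println(compareUnsigned(newer, older) < 0);        // true
    System.out.println(GenericObjectMapper.readReverseOrderedLong(newer, 0)); // 2000
  }

  // Unsigned lexicographic comparison, standing in for leveldb's key order.
  private static int compareUnsigned(byte[] a, byte[] b) {
    for (int i = 0; i < Math.min(a.length, b.length); i++) {
      int d = (a[i] & 0xff) - (b[i] & 0xff);
      if (d != 0) {
        return d;
      }
    }
    return a.length - b.length;
  }
}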
+ */ +@InterfaceAudience.Private +@InterfaceStability.Unstable +public class GenericObjectMapper { + private static final byte[] EMPTY_BYTES = new byte[0]; + + public static final ObjectReader OBJECT_READER; + public static final ObjectWriter OBJECT_WRITER; + + static { + ObjectMapper mapper = new ObjectMapper(); + OBJECT_READER = mapper.reader(Object.class); + OBJECT_WRITER = mapper.writer(); + } + + /** + * Serializes an Object into a byte array. Along with {@link #read(byte[])}, + * can be used to serialize an Object and deserialize it into an Object of + * the same type without needing to specify the Object's type, + * as long as it is one of the JSON-compatible objects understood by + * ObjectMapper. + * + * @param o An Object + * @return A byte array representation of the Object + * @throws IOException if there is a write error + */ + public static byte[] write(Object o) throws IOException { + if (o == null) { + return EMPTY_BYTES; + } + return OBJECT_WRITER.writeValueAsBytes(o); + } + + /** + * Deserializes an Object from a byte array created with + * {@link #write(Object)}. + * + * @param b A byte array + * @return An Object + * @throws IOException if there is a read error + */ + public static Object read(byte[] b) throws IOException { + return read(b, 0); + } + + /** + * Deserializes an Object from a byte array at a specified offset, assuming + * the bytes were created with {@link #write(Object)}. + * + * @param b A byte array + * @param offset Offset into the array + * @return An Object + * @throws IOException if there is a read error + */ + public static Object read(byte[] b, int offset) throws IOException { + if (b == null || b.length == 0) { + return null; + } + return OBJECT_READER.readValue(b, offset, b.length - offset); + } + + /** + * Converts a long to a 8-byte array so that lexicographic ordering of the + * produced byte arrays sort the longs in descending order. + * + * @param l A long + * @return A byte array + */ + public static byte[] writeReverseOrderedLong(long l) { + byte[] b = new byte[8]; + return writeReverseOrderedLong(l, b, 0); + } + + public static byte[] writeReverseOrderedLong(long l, byte[] b, int offset) { + b[offset] = (byte)(0x7f ^ ((l >> 56) & 0xff)); + for (int i = offset+1; i < offset+7; i++) { + b[i] = (byte)(0xff ^ ((l >> 8*(7-i)) & 0xff)); + } + b[offset+7] = (byte)(0xff ^ (l & 0xff)); + return b; + } + + /** + * Reads 8 bytes from an array starting at the specified offset and + * converts them to a long. The bytes are assumed to have been created + * with {@link #writeReverseOrderedLong}. 
+ * + * @param b A byte array + * @param offset An offset into the byte array + * @return A long + */ + public static long readReverseOrderedLong(byte[] b, int offset) { + long l = b[offset] & 0xff; + for (int i = 1; i < 8; i++) { + l = l << 8; + l = l | (b[offset+i]&0xff); + } + return l ^ 0x7fffffffffffffffl; + } + +} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/LeveldbTimelineStore.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/LeveldbTimelineStore.java new file mode 100644 index 0000000..9495700 --- /dev/null +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/LeveldbTimelineStore.java @@ -0,0 +1,1492 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.yarn.server.timeline; + +import static org.apache.hadoop.yarn.server.timeline.GenericObjectMapper.readReverseOrderedLong; +import static org.apache.hadoop.yarn.server.timeline.GenericObjectMapper.writeReverseOrderedLong; + +import java.io.ByteArrayOutputStream; +import java.io.File; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.Comparator; +import java.util.EnumSet; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; +import java.util.SortedSet; +import java.util.TreeMap; +import java.util.concurrent.locks.ReentrantLock; +import java.util.concurrent.locks.ReentrantReadWriteLock; + +import org.apache.commons.collections.map.LRUMap; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.classification.InterfaceAudience.Private; +import org.apache.hadoop.classification.InterfaceStability; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.fs.permission.FsPermission; +import org.apache.hadoop.io.IOUtils; +import org.apache.hadoop.io.WritableComparator; +import org.apache.hadoop.service.AbstractService; +import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities; +import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity; +import org.apache.hadoop.yarn.api.records.timeline.TimelineEvent; +import org.apache.hadoop.yarn.api.records.timeline.TimelineEvents; +import org.apache.hadoop.yarn.api.records.timeline.TimelineEvents.EventsOfOneEntity; +import 
org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse; +import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse.TimelinePutError; +import org.apache.hadoop.yarn.conf.YarnConfiguration; +import org.fusesource.leveldbjni.JniDBFactory; +import org.iq80.leveldb.DB; +import org.iq80.leveldb.DBIterator; +import org.iq80.leveldb.Options; +import org.iq80.leveldb.ReadOptions; +import org.iq80.leveldb.WriteBatch; +import org.iq80.leveldb.WriteOptions; + +import com.google.common.annotations.VisibleForTesting; + +/** + *

<p>An implementation of an application timeline store backed by leveldb.</p>
+ *
+ * <p>There are three sections of the db, the start time section,
+ * the entity section, and the indexed entity section.</p>
+ *
+ * <p>The start time section is used to retrieve the unique start time for
+ * a given entity. Its values each contain a start time while its keys are of
+ * the form:</p>
+ * <pre>
+ *   START_TIME_LOOKUP_PREFIX + entity type + entity id</pre>
+ *
+ * <p>The entity section is ordered by entity type, then entity start time
+ * descending, then entity id. There are four sub-sections of the entity
+ * section: events, primary filters, related entities,
+ * and other info. The event entries have event info serialized into their
+ * values. The other info entries have values corresponding to the values of
+ * the other info name/value map for the entry (note the names are contained
+ * in the key). All other entries have empty values. The key structure is as
+ * follows:</p>
+ * <pre>
+ *   ENTITY_ENTRY_PREFIX + entity type + revstarttime + entity id
+ *
+ *   ENTITY_ENTRY_PREFIX + entity type + revstarttime + entity id +
+ *     EVENTS_COLUMN + reveventtimestamp + eventtype
+ *
+ *   ENTITY_ENTRY_PREFIX + entity type + revstarttime + entity id +
+ *     PRIMARY_FILTERS_COLUMN + name + value
+ *
+ *   ENTITY_ENTRY_PREFIX + entity type + revstarttime + entity id +
+ *     OTHER_INFO_COLUMN + name
+ *
+ *   ENTITY_ENTRY_PREFIX + entity type + revstarttime + entity id +
+ *     RELATED_ENTITIES_COLUMN + relatedentity type + relatedentity id
+ *
+ *   ENTITY_ENTRY_PREFIX + entity type + revstarttime + entity id +
+ *     INVISIBLE_REVERSE_RELATED_ENTITIES_COLUMN + relatedentity type +
+ *     relatedentity id</pre>
+ *
+ * <p>The indexed entity section contains a primary filter name and primary
+ * filter value as the prefix. Within a given name/value, entire entity
+ * entries are stored in the same format as described in the entity section
+ * above (below, "key" represents any one of the possible entity entry keys
+ * described above).</p>
+ * <pre>
+ *   INDEXED_ENTRY_PREFIX + primaryfilter name + primaryfilter value +
+ *     key</pre>
+ */ +@InterfaceAudience.Private +@InterfaceStability.Unstable +public class LeveldbTimelineStore extends AbstractService + implements TimelineStore { + private static final Log LOG = LogFactory + .getLog(LeveldbTimelineStore.class); + + @Private + @VisibleForTesting + static final String FILENAME = "leveldb-timeline-store.ldb"; + + private static final byte[] START_TIME_LOOKUP_PREFIX = "k".getBytes(); + private static final byte[] ENTITY_ENTRY_PREFIX = "e".getBytes(); + private static final byte[] INDEXED_ENTRY_PREFIX = "i".getBytes(); + + private static final byte[] EVENTS_COLUMN = "e".getBytes(); + private static final byte[] PRIMARY_FILTERS_COLUMN = "f".getBytes(); + private static final byte[] OTHER_INFO_COLUMN = "i".getBytes(); + private static final byte[] RELATED_ENTITIES_COLUMN = "r".getBytes(); + private static final byte[] INVISIBLE_REVERSE_RELATED_ENTITIES_COLUMN = + "z".getBytes(); + + private static final byte[] EMPTY_BYTES = new byte[0]; + + @Private + @VisibleForTesting + static final FsPermission LEVELDB_DIR_UMASK = FsPermission + .createImmutable((short) 0700); + + private Map startTimeWriteCache; + private Map startTimeReadCache; + + /** + * Per-entity locks are obtained when writing. + */ + private final LockMap writeLocks = + new LockMap(); + + private final ReentrantReadWriteLock deleteLock = + new ReentrantReadWriteLock(); + + private DB db; + + private Thread deletionThread; + + public LeveldbTimelineStore() { + super(LeveldbTimelineStore.class.getName()); + } + + @Override + @SuppressWarnings("unchecked") + protected void serviceInit(Configuration conf) throws Exception { + Options options = new Options(); + options.createIfMissing(true); + options.cacheSize(conf.getLong( + YarnConfiguration.TIMELINE_SERVICE_LEVELDB_READ_CACHE_SIZE, + YarnConfiguration.DEFAULT_TIMELINE_SERVICE_LEVELDB_READ_CACHE_SIZE)); + JniDBFactory factory = new JniDBFactory(); + Path dbPath = new Path( + conf.get(YarnConfiguration.TIMELINE_SERVICE_LEVELDB_PATH), FILENAME); + FileSystem localFS = null; + try { + localFS = FileSystem.getLocal(conf); + if (!localFS.exists(dbPath)) { + if (!localFS.mkdirs(dbPath)) { + throw new IOException("Couldn't create directory for leveldb " + + "timeline store " + dbPath); + } + localFS.setPermission(dbPath, LEVELDB_DIR_UMASK); + } + } finally { + IOUtils.cleanup(LOG, localFS); + } + LOG.info("Using leveldb path " + dbPath); + db = factory.open(new File(dbPath.toString()), options); + startTimeWriteCache = + Collections.synchronizedMap(new LRUMap(getStartTimeWriteCacheSize( + conf))); + startTimeReadCache = + Collections.synchronizedMap(new LRUMap(getStartTimeReadCacheSize( + conf))); + + if (conf.getBoolean(YarnConfiguration.TIMELINE_SERVICE_TTL_ENABLE, true)) { + deletionThread = new EntityDeletionThread(conf); + deletionThread.start(); + } + + super.serviceInit(conf); + } + + @Override + protected void serviceStop() throws Exception { + if (deletionThread != null) { + deletionThread.interrupt(); + LOG.info("Waiting for deletion thread to complete its current action"); + try { + deletionThread.join(); + } catch (InterruptedException e) { + LOG.warn("Interrupted while waiting for deletion thread to complete," + + " closing db now", e); + } + } + IOUtils.cleanup(LOG, db); + super.serviceStop(); + } + + private static class StartAndInsertTime { + final long startTime; + final long insertTime; + + public StartAndInsertTime(long startTime, long insertTime) { + this.startTime = startTime; + this.insertTime = insertTime; + } + } + + private class 
EntityDeletionThread extends Thread { + private final long ttl; + private final long ttlInterval; + + public EntityDeletionThread(Configuration conf) { + ttl = conf.getLong(YarnConfiguration.TIMELINE_SERVICE_TTL_MS, + YarnConfiguration.DEFAULT_TIMELINE_SERVICE_TTL_MS); + ttlInterval = conf.getLong( + YarnConfiguration.TIMELINE_SERVICE_LEVELDB_TTL_INTERVAL_MS, + YarnConfiguration.DEFAULT_TIMELINE_SERVICE_LEVELDB_TTL_INTERVAL_MS); + LOG.info("Starting deletion thread with ttl " + ttl + " and cycle " + + "interval " + ttlInterval); + } + + @Override + public void run() { + while (true) { + long timestamp = System.currentTimeMillis() - ttl; + try { + discardOldEntities(timestamp); + Thread.sleep(ttlInterval); + } catch (IOException e) { + LOG.error(e); + } catch (InterruptedException e) { + LOG.info("Deletion thread received interrupt, exiting"); + break; + } + } + } + } + + private static class LockMap { + private static class CountingReentrantLock extends ReentrantLock { + private static final long serialVersionUID = 1L; + private int count; + private K key; + + CountingReentrantLock(K key) { + super(); + this.count = 0; + this.key = key; + } + } + + private Map> locks = + new HashMap>(); + + synchronized CountingReentrantLock getLock(K key) { + CountingReentrantLock lock = locks.get(key); + if (lock == null) { + lock = new CountingReentrantLock(key); + locks.put(key, lock); + } + + lock.count++; + return lock; + } + + synchronized void returnLock(CountingReentrantLock lock) { + if (lock.count == 0) { + throw new IllegalStateException("Returned lock more times than it " + + "was retrieved"); + } + lock.count--; + + if (lock.count == 0) { + locks.remove(lock.key); + } + } + } + + private static class KeyBuilder { + private static final int MAX_NUMBER_OF_KEY_ELEMENTS = 10; + private byte[][] b; + private boolean[] useSeparator; + private int index; + private int length; + + public KeyBuilder(int size) { + b = new byte[size][]; + useSeparator = new boolean[size]; + index = 0; + length = 0; + } + + public static KeyBuilder newInstance() { + return new KeyBuilder(MAX_NUMBER_OF_KEY_ELEMENTS); + } + + public KeyBuilder add(String s) { + return add(s.getBytes(), true); + } + + public KeyBuilder add(byte[] t) { + return add(t, false); + } + + public KeyBuilder add(byte[] t, boolean sep) { + b[index] = t; + useSeparator[index] = sep; + length += t.length; + if (sep) { + length++; + } + index++; + return this; + } + + public byte[] getBytes() throws IOException { + ByteArrayOutputStream baos = new ByteArrayOutputStream(length); + for (int i = 0; i < index; i++) { + baos.write(b[i]); + if (i < index-1 && useSeparator[i]) { + baos.write(0x0); + } + } + return baos.toByteArray(); + } + + public byte[] getBytesForLookup() throws IOException { + ByteArrayOutputStream baos = new ByteArrayOutputStream(length); + for (int i = 0; i < index; i++) { + baos.write(b[i]); + if (useSeparator[i]) { + baos.write(0x0); + } + } + return baos.toByteArray(); + } + } + + private static class KeyParser { + private final byte[] b; + private int offset; + + public KeyParser(byte[] b, int offset) { + this.b = b; + this.offset = offset; + } + + public String getNextString() throws IOException { + if (offset >= b.length) { + throw new IOException( + "tried to read nonexistent string from byte array"); + } + int i = 0; + while (offset+i < b.length && b[offset+i] != 0x0) { + i++; + } + String s = new String(b, offset, i); + offset = offset + i + 1; + return s; + } + + public long getNextLong() throws IOException { + if (offset+8 >= 
b.length) { + throw new IOException("byte array ran out when trying to read long"); + } + long l = readReverseOrderedLong(b, offset); + offset += 8; + return l; + } + + public int getOffset() { + return offset; + } + } + + @Override + public TimelineEntity getEntity(String entityId, String entityType, + EnumSet fields) throws IOException { + Long revStartTime = getStartTimeLong(entityId, entityType); + if (revStartTime == null) { + return null; + } + byte[] prefix = KeyBuilder.newInstance().add(ENTITY_ENTRY_PREFIX) + .add(entityType).add(writeReverseOrderedLong(revStartTime)) + .add(entityId).getBytesForLookup(); + + DBIterator iterator = null; + try { + iterator = db.iterator(); + iterator.seek(prefix); + + return getEntity(entityId, entityType, revStartTime, fields, iterator, + prefix, prefix.length); + } finally { + IOUtils.cleanup(LOG, iterator); + } + } + + /** + * Read entity from a db iterator. If no information is found in the + * specified fields for this entity, return null. + */ + private static TimelineEntity getEntity(String entityId, String entityType, + Long startTime, EnumSet fields, DBIterator iterator, + byte[] prefix, int prefixlen) throws IOException { + if (fields == null) { + fields = EnumSet.allOf(Field.class); + } + + TimelineEntity entity = new TimelineEntity(); + boolean events = false; + boolean lastEvent = false; + if (fields.contains(Field.EVENTS)) { + events = true; + } else if (fields.contains(Field.LAST_EVENT_ONLY)) { + lastEvent = true; + } else { + entity.setEvents(null); + } + boolean relatedEntities = false; + if (fields.contains(Field.RELATED_ENTITIES)) { + relatedEntities = true; + } else { + entity.setRelatedEntities(null); + } + boolean primaryFilters = false; + if (fields.contains(Field.PRIMARY_FILTERS)) { + primaryFilters = true; + } else { + entity.setPrimaryFilters(null); + } + boolean otherInfo = false; + if (fields.contains(Field.OTHER_INFO)) { + otherInfo = true; + } else { + entity.setOtherInfo(null); + } + + // iterate through the entity's entry, parsing information if it is part + // of a requested field + for (; iterator.hasNext(); iterator.next()) { + byte[] key = iterator.peekNext().getKey(); + if (!prefixMatches(prefix, prefixlen, key)) { + break; + } + if (key.length == prefixlen) { + continue; + } + if (key[prefixlen] == PRIMARY_FILTERS_COLUMN[0]) { + if (primaryFilters) { + addPrimaryFilter(entity, key, + prefixlen + PRIMARY_FILTERS_COLUMN.length); + } + } else if (key[prefixlen] == OTHER_INFO_COLUMN[0]) { + if (otherInfo) { + entity.addOtherInfo(parseRemainingKey(key, + prefixlen + OTHER_INFO_COLUMN.length), + GenericObjectMapper.read(iterator.peekNext().getValue())); + } + } else if (key[prefixlen] == RELATED_ENTITIES_COLUMN[0]) { + if (relatedEntities) { + addRelatedEntity(entity, key, + prefixlen + RELATED_ENTITIES_COLUMN.length); + } + } else if (key[prefixlen] == EVENTS_COLUMN[0]) { + if (events || (lastEvent && + entity.getEvents().size() == 0)) { + TimelineEvent event = getEntityEvent(null, key, prefixlen + + EVENTS_COLUMN.length, iterator.peekNext().getValue()); + if (event != null) { + entity.addEvent(event); + } + } + } else { + if (key[prefixlen] != + INVISIBLE_REVERSE_RELATED_ENTITIES_COLUMN[0]) { + LOG.warn(String.format("Found unexpected column for entity %s of " + + "type %s (0x%02x)", entityId, entityType, key[prefixlen])); + } + } + } + + entity.setEntityId(entityId); + entity.setEntityType(entityType); + entity.setStartTime(startTime); + + return entity; + } + + @Override + public TimelineEvents 
getEntityTimelines(String entityType, + SortedSet entityIds, Long limit, Long windowStart, + Long windowEnd, Set eventType) throws IOException { + TimelineEvents events = new TimelineEvents(); + if (entityIds == null || entityIds.isEmpty()) { + return events; + } + // create a lexicographically-ordered map from start time to entities + Map> startTimeMap = new TreeMap>(new Comparator() { + @Override + public int compare(byte[] o1, byte[] o2) { + return WritableComparator.compareBytes(o1, 0, o1.length, o2, 0, + o2.length); + } + }); + DBIterator iterator = null; + try { + // look up start times for the specified entities + // skip entities with no start time + for (String entityId : entityIds) { + byte[] startTime = getStartTime(entityId, entityType); + if (startTime != null) { + List entities = startTimeMap.get(startTime); + if (entities == null) { + entities = new ArrayList(); + startTimeMap.put(startTime, entities); + } + entities.add(new EntityIdentifier(entityId, entityType)); + } + } + for (Entry> entry : + startTimeMap.entrySet()) { + // look up the events matching the given parameters (limit, + // start time, end time, event types) for entities whose start times + // were found and add the entities to the return list + byte[] revStartTime = entry.getKey(); + for (EntityIdentifier entityIdentifier : entry.getValue()) { + EventsOfOneEntity entity = new EventsOfOneEntity(); + entity.setEntityId(entityIdentifier.getId()); + entity.setEntityType(entityType); + events.addEvent(entity); + KeyBuilder kb = KeyBuilder.newInstance().add(ENTITY_ENTRY_PREFIX) + .add(entityType).add(revStartTime).add(entityIdentifier.getId()) + .add(EVENTS_COLUMN); + byte[] prefix = kb.getBytesForLookup(); + if (windowEnd == null) { + windowEnd = Long.MAX_VALUE; + } + byte[] revts = writeReverseOrderedLong(windowEnd); + kb.add(revts); + byte[] first = kb.getBytesForLookup(); + byte[] last = null; + if (windowStart != null) { + last = KeyBuilder.newInstance().add(prefix) + .add(writeReverseOrderedLong(windowStart)).getBytesForLookup(); + } + if (limit == null) { + limit = DEFAULT_LIMIT; + } + iterator = db.iterator(); + for (iterator.seek(first); entity.getEvents().size() < limit && + iterator.hasNext(); iterator.next()) { + byte[] key = iterator.peekNext().getKey(); + if (!prefixMatches(prefix, prefix.length, key) || (last != null && + WritableComparator.compareBytes(key, 0, key.length, last, 0, + last.length) > 0)) { + break; + } + TimelineEvent event = getEntityEvent(eventType, key, prefix.length, + iterator.peekNext().getValue()); + if (event != null) { + entity.addEvent(event); + } + } + } + } + } finally { + IOUtils.cleanup(LOG, iterator); + } + return events; + } + + /** + * Returns true if the byte array begins with the specified prefix. 
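[Illustration, not part of the patch] getEntityTimelines, getEntity, and getEntityByTime all follow the same scan idiom: build a key prefix in the documented ENTITY_ENTRY_PREFIX layout, seek a DBIterator to it, and stop as soon as a key no longer begins with that prefix (the check that prefixMatches performs just below). A self-contained sketch against a throw-away leveldb, with hypothetical keys mimicking the "prefix + type + 0x0 + id" layout:

import java.io.File;
import java.nio.charset.StandardCharsets;
import org.fusesource.leveldbjni.JniDBFactory;
import org.iq80.leveldb.DB;
import org.iq80.leveldb.DBIterator;
import org.iq80.leveldb.Options;

public class PrefixScanSketch {
  public static void main(String[] args) throws Exception {
    Options options = new Options();
    options.createIfMissing(true);
    // Hypothetical local path for the demo db.
    DB db = new JniDBFactory().open(new File("/tmp/prefix-scan-demo.ldb"), options);
    try {
      db.put(bytes("e\0APP\0entity_1"), bytes(""));
      db.put(bytes("e\0APP\0entity_2"), bytes(""));
      db.put(bytes("e\0JOB\0entity_3"), bytes(""));

      byte[] prefix = bytes("e\0APP\0");
      DBIterator iterator = db.iterator();
      try {
        // Seek to the first key >= prefix, then read until the prefix stops matching.
        for (iterator.seek(prefix); iterator.hasNext(); iterator.next()) {
          byte[] key = iterator.peekNext().getKey();
          if (!startsWith(key, prefix)) {
            break;
          }
          System.out.println(new String(key, StandardCharsets.UTF_8)); // the two APP entities
        }
      } finally {
        iterator.close();
      }
    } finally {
      db.close();
    }
  }

  private static byte[] bytes(String s) {
    return s.getBytes(StandardCharsets.UTF_8);
  }

  // Same check LeveldbTimelineStore#prefixMatches performs via WritableComparator.
  private static boolean startsWith(byte[] key, byte[] prefix) {
    if (key.length < prefix.length) {
      return false;
    }
    for (int i = 0; i < prefix.length; i++) {
      if (key[i] != prefix[i]) {
        return false;
      }
    }
    return true;
  }
}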
+ */ + private static boolean prefixMatches(byte[] prefix, int prefixlen, + byte[] b) { + if (b.length < prefixlen) { + return false; + } + return WritableComparator.compareBytes(prefix, 0, prefixlen, b, 0, + prefixlen) == 0; + } + + @Override + public TimelineEntities getEntities(String entityType, + Long limit, Long windowStart, Long windowEnd, String fromId, Long fromTs, + NameValuePair primaryFilter, Collection secondaryFilters, + EnumSet fields) throws IOException { + if (primaryFilter == null) { + // if no primary filter is specified, prefix the lookup with + // ENTITY_ENTRY_PREFIX + return getEntityByTime(ENTITY_ENTRY_PREFIX, entityType, limit, + windowStart, windowEnd, fromId, fromTs, secondaryFilters, fields); + } else { + // if a primary filter is specified, prefix the lookup with + // INDEXED_ENTRY_PREFIX + primaryFilterName + primaryFilterValue + + // ENTITY_ENTRY_PREFIX + byte[] base = KeyBuilder.newInstance().add(INDEXED_ENTRY_PREFIX) + .add(primaryFilter.getName()) + .add(GenericObjectMapper.write(primaryFilter.getValue()), true) + .add(ENTITY_ENTRY_PREFIX).getBytesForLookup(); + return getEntityByTime(base, entityType, limit, windowStart, windowEnd, + fromId, fromTs, secondaryFilters, fields); + } + } + + /** + * Retrieves a list of entities satisfying given parameters. + * + * @param base A byte array prefix for the lookup + * @param entityType The type of the entity + * @param limit A limit on the number of entities to return + * @param starttime The earliest entity start time to retrieve (exclusive) + * @param endtime The latest entity start time to retrieve (inclusive) + * @param fromId Retrieve entities starting with this entity + * @param fromTs Ignore entities with insert timestamp later than this ts + * @param secondaryFilters Filter pairs that the entities should match + * @param fields The set of fields to retrieve + * @return A list of entities + * @throws IOException + */ + private TimelineEntities getEntityByTime(byte[] base, + String entityType, Long limit, Long starttime, Long endtime, + String fromId, Long fromTs, Collection secondaryFilters, + EnumSet fields) throws IOException { + DBIterator iterator = null; + try { + KeyBuilder kb = KeyBuilder.newInstance().add(base).add(entityType); + // only db keys matching the prefix (base + entity type) will be parsed + byte[] prefix = kb.getBytesForLookup(); + if (endtime == null) { + // if end time is null, place no restriction on end time + endtime = Long.MAX_VALUE; + } + // construct a first key that will be seeked to using end time or fromId + byte[] first = null; + if (fromId != null) { + Long fromIdStartTime = getStartTimeLong(fromId, entityType); + if (fromIdStartTime == null) { + // no start time for provided id, so return empty entities + return new TimelineEntities(); + } + if (fromIdStartTime <= endtime) { + // if provided id's start time falls before the end of the window, + // use it to construct the seek key + first = kb.add(writeReverseOrderedLong(fromIdStartTime)) + .add(fromId).getBytesForLookup(); + } + } + // if seek key wasn't constructed using fromId, construct it using end ts + if (first == null) { + first = kb.add(writeReverseOrderedLong(endtime)).getBytesForLookup(); + } + byte[] last = null; + if (starttime != null) { + // if start time is not null, set a last key that will not be + // iterated past + last = KeyBuilder.newInstance().add(base).add(entityType) + .add(writeReverseOrderedLong(starttime)).getBytesForLookup(); + } + if (limit == null) { + // if limit is not specified, use the 
default + limit = DEFAULT_LIMIT; + } + + TimelineEntities entities = new TimelineEntities(); + iterator = db.iterator(); + iterator.seek(first); + // iterate until one of the following conditions is met: limit is + // reached, there are no more keys, the key prefix no longer matches, + // or a start time has been specified and reached/exceeded + while (entities.getEntities().size() < limit && iterator.hasNext()) { + byte[] key = iterator.peekNext().getKey(); + if (!prefixMatches(prefix, prefix.length, key) || (last != null && + WritableComparator.compareBytes(key, 0, key.length, last, 0, + last.length) > 0)) { + break; + } + // read the start time and entity id from the current key + KeyParser kp = new KeyParser(key, prefix.length); + Long startTime = kp.getNextLong(); + String entityId = kp.getNextString(); + + if (fromTs != null) { + long insertTime = readReverseOrderedLong(iterator.peekNext() + .getValue(), 0); + if (insertTime > fromTs) { + byte[] firstKey = key; + while (iterator.hasNext() && prefixMatches(firstKey, + kp.getOffset(), key)) { + iterator.next(); + key = iterator.peekNext().getKey(); + } + continue; + } + } + + // parse the entity that owns this key, iterating over all keys for + // the entity + TimelineEntity entity = getEntity(entityId, entityType, startTime, + fields, iterator, key, kp.getOffset()); + // determine if the retrieved entity matches the provided secondary + // filters, and if so add it to the list of entities to return + boolean filterPassed = true; + if (secondaryFilters != null) { + for (NameValuePair filter : secondaryFilters) { + Object v = entity.getOtherInfo().get(filter.getName()); + if (v == null) { + Set vs = entity.getPrimaryFilters() + .get(filter.getName()); + if (vs != null && !vs.contains(filter.getValue())) { + filterPassed = false; + break; + } + } else if (!v.equals(filter.getValue())) { + filterPassed = false; + break; + } + } + } + if (filterPassed) { + entities.addEntity(entity); + } + } + return entities; + } finally { + IOUtils.cleanup(LOG, iterator); + } + } + + /** + * Put a single entity. If there is an error, add a TimelinePutError to the + * given response. 
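[Illustration, not part of the patch] put() stages every key derived from one entity (marker, events, primary filters, related entities, other info) in a single WriteBatch and commits it with one db.write() call, so the entity's keys become visible together. A compressed sketch of that batching idiom; the db handle and the key/value arrays are assumed inputs:

import org.iq80.leveldb.DB;
import org.iq80.leveldb.WriteBatch;

public class WriteBatchSketch {
  // Minimal sketch of the batching idiom used by put(): stage all keys for one
  // entity, then commit them atomically with a single write() call.
  static void writeEntityKeys(DB db, byte[][] keys, byte[][] values) throws Exception {
    WriteBatch batch = db.createWriteBatch();
    try {
      for (int i = 0; i < keys.length; i++) {
        batch.put(keys[i], values[i]);
      }
      db.write(batch);      // all staged puts become visible together
    } finally {
      batch.close();        // WriteBatch is Closeable; the patch uses IOUtils.cleanup
    }
  }
}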
+ */ + private void put(TimelineEntity entity, TimelinePutResponse response) { + LockMap.CountingReentrantLock lock = + writeLocks.getLock(new EntityIdentifier(entity.getEntityId(), + entity.getEntityType())); + lock.lock(); + WriteBatch writeBatch = null; + List relatedEntitiesWithoutStartTimes = + new ArrayList(); + byte[] revStartTime = null; + try { + writeBatch = db.createWriteBatch(); + List events = entity.getEvents(); + // look up the start time for the entity + StartAndInsertTime startAndInsertTime = getAndSetStartTime( + entity.getEntityId(), entity.getEntityType(), + entity.getStartTime(), events); + if (startAndInsertTime == null) { + // if no start time is found, add an error and return + TimelinePutError error = new TimelinePutError(); + error.setEntityId(entity.getEntityId()); + error.setEntityType(entity.getEntityType()); + error.setErrorCode(TimelinePutError.NO_START_TIME); + response.addError(error); + return; + } + revStartTime = writeReverseOrderedLong(startAndInsertTime + .startTime); + + Map> primaryFilters = entity.getPrimaryFilters(); + + // write entity marker + byte[] markerKey = createEntityMarkerKey(entity.getEntityId(), + entity.getEntityType(), revStartTime); + byte[] markerValue = writeReverseOrderedLong(startAndInsertTime + .insertTime); + writeBatch.put(markerKey, markerValue); + writePrimaryFilterEntries(writeBatch, primaryFilters, markerKey, + markerValue); + + // write event entries + if (events != null && !events.isEmpty()) { + for (TimelineEvent event : events) { + byte[] revts = writeReverseOrderedLong(event.getTimestamp()); + byte[] key = createEntityEventKey(entity.getEntityId(), + entity.getEntityType(), revStartTime, revts, + event.getEventType()); + byte[] value = GenericObjectMapper.write(event.getEventInfo()); + writeBatch.put(key, value); + writePrimaryFilterEntries(writeBatch, primaryFilters, key, value); + } + } + + // write related entity entries + Map> relatedEntities = + entity.getRelatedEntities(); + if (relatedEntities != null && !relatedEntities.isEmpty()) { + for (Entry> relatedEntityList : + relatedEntities.entrySet()) { + String relatedEntityType = relatedEntityList.getKey(); + for (String relatedEntityId : relatedEntityList.getValue()) { + // invisible "reverse" entries (entity -> related entity) + byte[] key = createReverseRelatedEntityKey(entity.getEntityId(), + entity.getEntityType(), revStartTime, relatedEntityId, + relatedEntityType); + writeBatch.put(key, EMPTY_BYTES); + // look up start time of related entity + byte[] relatedEntityStartTime = getStartTime(relatedEntityId, + relatedEntityType); + // delay writing the related entity if no start time is found + if (relatedEntityStartTime == null) { + relatedEntitiesWithoutStartTimes.add( + new EntityIdentifier(relatedEntityId, relatedEntityType)); + continue; + } + // write "forward" entry (related entity -> entity) + key = createRelatedEntityKey(relatedEntityId, + relatedEntityType, relatedEntityStartTime, + entity.getEntityId(), entity.getEntityType()); + writeBatch.put(key, EMPTY_BYTES); + } + } + } + + // write primary filter entries + if (primaryFilters != null && !primaryFilters.isEmpty()) { + for (Entry> primaryFilter : + primaryFilters.entrySet()) { + for (Object primaryFilterValue : primaryFilter.getValue()) { + byte[] key = createPrimaryFilterKey(entity.getEntityId(), + entity.getEntityType(), revStartTime, + primaryFilter.getKey(), primaryFilterValue); + writeBatch.put(key, EMPTY_BYTES); + writePrimaryFilterEntries(writeBatch, primaryFilters, key, + EMPTY_BYTES); + } 
+ } + } + + // write other info entries + Map otherInfo = entity.getOtherInfo(); + if (otherInfo != null && !otherInfo.isEmpty()) { + for (Entry i : otherInfo.entrySet()) { + byte[] key = createOtherInfoKey(entity.getEntityId(), + entity.getEntityType(), revStartTime, i.getKey()); + byte[] value = GenericObjectMapper.write(i.getValue()); + writeBatch.put(key, value); + writePrimaryFilterEntries(writeBatch, primaryFilters, key, value); + } + } + db.write(writeBatch); + } catch (IOException e) { + LOG.error("Error putting entity " + entity.getEntityId() + + " of type " + entity.getEntityType(), e); + TimelinePutError error = new TimelinePutError(); + error.setEntityId(entity.getEntityId()); + error.setEntityType(entity.getEntityType()); + error.setErrorCode(TimelinePutError.IO_EXCEPTION); + response.addError(error); + } finally { + lock.unlock(); + writeLocks.returnLock(lock); + IOUtils.cleanup(LOG, writeBatch); + } + + for (EntityIdentifier relatedEntity : relatedEntitiesWithoutStartTimes) { + lock = writeLocks.getLock(relatedEntity); + lock.lock(); + try { + StartAndInsertTime relatedEntityStartAndInsertTime = + getAndSetStartTime(relatedEntity.getId(), relatedEntity.getType(), + readReverseOrderedLong(revStartTime, 0), null); + if (relatedEntityStartAndInsertTime == null) { + throw new IOException("Error setting start time for related entity"); + } + byte[] relatedEntityStartTime = writeReverseOrderedLong( + relatedEntityStartAndInsertTime.startTime); + db.put(createRelatedEntityKey(relatedEntity.getId(), + relatedEntity.getType(), relatedEntityStartTime, + entity.getEntityId(), entity.getEntityType()), EMPTY_BYTES); + db.put(createEntityMarkerKey(relatedEntity.getId(), + relatedEntity.getType(), relatedEntityStartTime), + writeReverseOrderedLong(relatedEntityStartAndInsertTime + .insertTime)); + } catch (IOException e) { + LOG.error("Error putting related entity " + relatedEntity.getId() + + " of type " + relatedEntity.getType() + " for entity " + + entity.getEntityId() + " of type " + entity.getEntityType(), e); + TimelinePutError error = new TimelinePutError(); + error.setEntityId(entity.getEntityId()); + error.setEntityType(entity.getEntityType()); + error.setErrorCode(TimelinePutError.IO_EXCEPTION); + response.addError(error); + } finally { + lock.unlock(); + writeLocks.returnLock(lock); + } + } + } + + /** + * For a given key / value pair that has been written to the db, + * write additional entries to the db for each primary filter. + */ + private static void writePrimaryFilterEntries(WriteBatch writeBatch, + Map> primaryFilters, byte[] key, byte[] value) + throws IOException { + if (primaryFilters != null && !primaryFilters.isEmpty()) { + for (Entry> pf : primaryFilters.entrySet()) { + for (Object pfval : pf.getValue()) { + writeBatch.put(addPrimaryFilterToKey(pf.getKey(), pfval, + key), value); + } + } + } + } + + @Override + public TimelinePutResponse put(TimelineEntities entities) { + try { + deleteLock.readLock().lock(); + TimelinePutResponse response = new TimelinePutResponse(); + for (TimelineEntity entity : entities.getEntities()) { + put(entity, response); + } + return response; + } finally { + deleteLock.readLock().unlock(); + } + } + + /** + * Get the unique start time for a given entity as a byte array that sorts + * the timestamps in reverse order (see {@link + * GenericObjectMapper#writeReverseOrderedLong(long)}). 
+ * + * @param entityId The id of the entity + * @param entityType The type of the entity + * @return A byte array, null if not found + * @throws IOException + */ + private byte[] getStartTime(String entityId, String entityType) + throws IOException { + Long l = getStartTimeLong(entityId, entityType); + return l == null ? null : writeReverseOrderedLong(l); + } + + /** + * Get the unique start time for a given entity as a Long. + * + * @param entityId The id of the entity + * @param entityType The type of the entity + * @return A Long, null if not found + * @throws IOException + */ + private Long getStartTimeLong(String entityId, String entityType) + throws IOException { + EntityIdentifier entity = new EntityIdentifier(entityId, entityType); + // start time is not provided, so try to look it up + if (startTimeReadCache.containsKey(entity)) { + // found the start time in the cache + return startTimeReadCache.get(entity); + } else { + // try to look up the start time in the db + byte[] b = createStartTimeLookupKey(entity.getId(), entity.getType()); + byte[] v = db.get(b); + if (v == null) { + // did not find the start time in the db + return null; + } else { + // found the start time in the db + Long l = readReverseOrderedLong(v, 0); + startTimeReadCache.put(entity, l); + return l; + } + } + } + + /** + * Get the unique start time for a given entity as a byte array that sorts + * the timestamps in reverse order (see {@link + * GenericObjectMapper#writeReverseOrderedLong(long)}). If the start time + * doesn't exist, set it based on the information provided. Should only be + * called when a lock has been obtained on the entity. + * + * @param entityId The id of the entity + * @param entityType The type of the entity + * @param startTime The start time of the entity, or null + * @param events A list of events for the entity, or null + * @return A StartAndInsertTime + * @throws IOException + */ + private StartAndInsertTime getAndSetStartTime(String entityId, + String entityType, Long startTime, List events) + throws IOException { + EntityIdentifier entity = new EntityIdentifier(entityId, entityType); + if (startTime == null) { + // start time is not provided, so try to look it up + if (startTimeWriteCache.containsKey(entity)) { + // found the start time in the cache + return startTimeWriteCache.get(entity); + } else { + if (events != null) { + // prepare a start time from events in case it is needed + Long min = Long.MAX_VALUE; + for (TimelineEvent e : events) { + if (min > e.getTimestamp()) { + min = e.getTimestamp(); + } + } + startTime = min; + } + return checkStartTimeInDb(entity, startTime); + } + } else { + // start time is provided + if (startTimeWriteCache.containsKey(entity)) { + // always use start time from cache if it exists + return startTimeWriteCache.get(entity); + } else { + // check the provided start time matches the db + return checkStartTimeInDb(entity, startTime); + } + } + } + + /** + * Checks db for start time and returns it if it exists. If it doesn't + * exist, writes the suggested start time (if it is not null). This is + * only called when the start time is not found in the cache, + * so it adds it back into the cache if it is found. Should only be called + * when a lock has been obtained on the entity. 
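As the start-time javadoc above describes, timestamps are embedded in keys via GenericObjectMapper.writeReverseOrderedLong so that plain byte-order iteration returns the newest start times first. The sketch below does not reproduce that method's byte layout; it only demonstrates the ordering property with a simplified stand-in (big-endian bytes of Long.MAX_VALUE - ts, valid for non-negative timestamps).

    import java.nio.ByteBuffer;
    import java.util.Comparator;
    import java.util.TreeMap;

    public class ReverseOrderSketch {
      // Simplified stand-in for writeReverseOrderedLong: for ts >= 0, the
      // big-endian bytes of (Long.MAX_VALUE - ts) sort newest-first.
      static byte[] reverseOrdered(long ts) {
        return ByteBuffer.allocate(8).putLong(Long.MAX_VALUE - ts).array();
      }

      // Unsigned lexicographic comparison, i.e. the default LevelDB key order.
      static final Comparator<byte[]> BYTE_ORDER = new Comparator<byte[]>() {
        @Override
        public int compare(byte[] a, byte[] b) {
          int n = Math.min(a.length, b.length);
          for (int i = 0; i < n; i++) {
            int d = (a[i] & 0xff) - (b[i] & 0xff);
            if (d != 0) {
              return d;
            }
          }
          return a.length - b.length;
        }
      };

      public static void main(String[] args) {
        TreeMap<byte[], Long> index = new TreeMap<byte[], Long>(BYTE_ORDER);
        for (long ts : new long[] {1000L, 3000L, 2000L}) {
          index.put(reverseOrdered(ts), ts);
        }
        System.out.println(index.values()); // [3000, 2000, 1000]: newest first
      }
    }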
+ */ + private StartAndInsertTime checkStartTimeInDb(EntityIdentifier entity, + Long suggestedStartTime) throws IOException { + StartAndInsertTime startAndInsertTime = null; + // create lookup key for start time + byte[] b = createStartTimeLookupKey(entity.getId(), entity.getType()); + // retrieve value for key + byte[] v = db.get(b); + if (v == null) { + // start time doesn't exist in db + if (suggestedStartTime == null) { + return null; + } + startAndInsertTime = new StartAndInsertTime(suggestedStartTime, + System.currentTimeMillis()); + + // write suggested start time + v = new byte[16]; + writeReverseOrderedLong(suggestedStartTime, v, 0); + writeReverseOrderedLong(startAndInsertTime.insertTime, v, 8); + WriteOptions writeOptions = new WriteOptions(); + writeOptions.sync(true); + db.put(b, v, writeOptions); + } else { + // found start time in db, so ignore suggested start time + startAndInsertTime = new StartAndInsertTime(readReverseOrderedLong(v, 0), + readReverseOrderedLong(v, 8)); + } + startTimeWriteCache.put(entity, startAndInsertTime); + startTimeReadCache.put(entity, startAndInsertTime.startTime); + return startAndInsertTime; + } + + /** + * Creates a key for looking up the start time of a given entity, + * of the form START_TIME_LOOKUP_PREFIX + entity type + entity id. + */ + private static byte[] createStartTimeLookupKey(String entityId, + String entityType) throws IOException { + return KeyBuilder.newInstance().add(START_TIME_LOOKUP_PREFIX) + .add(entityType).add(entityId).getBytes(); + } + + /** + * Creates an entity marker, serializing ENTITY_ENTRY_PREFIX + entity type + + * revstarttime + entity id. + */ + private static byte[] createEntityMarkerKey(String entityId, + String entityType, byte[] revStartTime) throws IOException { + return KeyBuilder.newInstance().add(ENTITY_ENTRY_PREFIX) + .add(entityType).add(revStartTime).add(entityId).getBytesForLookup(); + } + + /** + * Creates an index entry for the given key of the form + * INDEXED_ENTRY_PREFIX + primaryfiltername + primaryfiltervalue + key. + */ + private static byte[] addPrimaryFilterToKey(String primaryFilterName, + Object primaryFilterValue, byte[] key) throws IOException { + return KeyBuilder.newInstance().add(INDEXED_ENTRY_PREFIX) + .add(primaryFilterName) + .add(GenericObjectMapper.write(primaryFilterValue), true).add(key) + .getBytes(); + } + + /** + * Creates an event key, serializing ENTITY_ENTRY_PREFIX + entity type + + * revstarttime + entity id + EVENTS_COLUMN + reveventtimestamp + event type. + */ + private static byte[] createEntityEventKey(String entityId, + String entityType, byte[] revStartTime, byte[] revEventTimestamp, + String eventType) throws IOException { + return KeyBuilder.newInstance().add(ENTITY_ENTRY_PREFIX) + .add(entityType).add(revStartTime).add(entityId).add(EVENTS_COLUMN) + .add(revEventTimestamp).add(eventType).getBytes(); + } + + /** + * Creates an event object from the given key, offset, and value. If the + * event type is not contained in the specified set of event types, + * returns null. 
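The key helpers above compose each row key from a section prefix, the entity type, the reverse-ordered start time and the entity id, so all rows for one entity (and, one level up, for one type) are adjacent and can be range-scanned by prefix. KeyBuilder's actual separator and "lookup" encodings are not reproduced here; the sketch only illustrates the prefix idea with a hypothetical 0x0-delimited layout and a check in the spirit of the store's prefixMatches().

    import java.io.ByteArrayOutputStream;
    import java.nio.charset.Charset;

    public class KeyPrefixSketch {
      private static final Charset UTF8 = Charset.forName("UTF-8");

      // Illustrative layout only: join string components with a 0x0 separator
      // so keys sharing leading components stay contiguous under byte order.
      static byte[] key(String... parts) {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        for (String part : parts) {
          byte[] b = part.getBytes(UTF8);
          out.write(b, 0, b.length);
          out.write(0x0);
        }
        return out.toByteArray();
      }

      // Does 'key' start with the first prefixLen bytes of 'prefix'?
      static boolean prefixMatches(byte[] prefix, int prefixLen, byte[] key) {
        if (key.length < prefixLen) {
          return false;
        }
        for (int i = 0; i < prefixLen; i++) {
          if (prefix[i] != key[i]) {
            return false;
          }
        }
        return true;
      }

      public static void main(String[] args) {
        byte[] typePrefix = key("e", "YARN_APPLICATION");
        byte[] eventKey = key("e", "YARN_APPLICATION", "application_1_0001", "events");
        System.out.println(prefixMatches(typePrefix, typePrefix.length, eventKey)); // true
      }
    }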
+ */ + private static TimelineEvent getEntityEvent(Set eventTypes, + byte[] key, int offset, byte[] value) throws IOException { + KeyParser kp = new KeyParser(key, offset); + long ts = kp.getNextLong(); + String tstype = kp.getNextString(); + if (eventTypes == null || eventTypes.contains(tstype)) { + TimelineEvent event = new TimelineEvent(); + event.setTimestamp(ts); + event.setEventType(tstype); + Object o = GenericObjectMapper.read(value); + if (o == null) { + event.setEventInfo(null); + } else if (o instanceof Map) { + @SuppressWarnings("unchecked") + Map m = (Map) o; + event.setEventInfo(m); + } else { + throw new IOException("Couldn't deserialize event info map"); + } + return event; + } + return null; + } + + /** + * Creates a primary filter key, serializing ENTITY_ENTRY_PREFIX + + * entity type + revstarttime + entity id + PRIMARY_FILTERS_COLUMN + name + + * value. + */ + private static byte[] createPrimaryFilterKey(String entityId, + String entityType, byte[] revStartTime, String name, Object value) + throws IOException { + return KeyBuilder.newInstance().add(ENTITY_ENTRY_PREFIX).add(entityType) + .add(revStartTime).add(entityId).add(PRIMARY_FILTERS_COLUMN).add(name) + .add(GenericObjectMapper.write(value)).getBytes(); + } + + /** + * Parses the primary filter from the given key at the given offset and + * adds it to the given entity. + */ + private static void addPrimaryFilter(TimelineEntity entity, byte[] key, + int offset) throws IOException { + KeyParser kp = new KeyParser(key, offset); + String name = kp.getNextString(); + Object value = GenericObjectMapper.read(key, kp.getOffset()); + entity.addPrimaryFilter(name, value); + } + + /** + * Creates an other info key, serializing ENTITY_ENTRY_PREFIX + entity type + + * revstarttime + entity id + OTHER_INFO_COLUMN + name. + */ + private static byte[] createOtherInfoKey(String entityId, String entityType, + byte[] revStartTime, String name) throws IOException { + return KeyBuilder.newInstance().add(ENTITY_ENTRY_PREFIX).add(entityType) + .add(revStartTime).add(entityId).add(OTHER_INFO_COLUMN).add(name) + .getBytes(); + } + + /** + * Creates a string representation of the byte array from the given offset + * to the end of the array (for parsing other info keys). + */ + private static String parseRemainingKey(byte[] b, int offset) { + return new String(b, offset, b.length - offset); + } + + /** + * Creates a related entity key, serializing ENTITY_ENTRY_PREFIX + + * entity type + revstarttime + entity id + RELATED_ENTITIES_COLUMN + + * relatedentity type + relatedentity id. + */ + private static byte[] createRelatedEntityKey(String entityId, + String entityType, byte[] revStartTime, String relatedEntityId, + String relatedEntityType) throws IOException { + return KeyBuilder.newInstance().add(ENTITY_ENTRY_PREFIX).add(entityType) + .add(revStartTime).add(entityId).add(RELATED_ENTITIES_COLUMN) + .add(relatedEntityType).add(relatedEntityId).getBytes(); + } + + /** + * Parses the related entity from the given key at the given offset and + * adds it to the given entity. 
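Event info, other info and primary-filter values are serialized with GenericObjectMapper.write() and read back with GenericObjectMapper.read(), as the helpers above show. A small round-trip sketch, assuming the class is available under the renamed org.apache.hadoop.yarn.server.timeline package; the map keys are made up.

    import java.util.HashMap;
    import java.util.Map;

    import org.apache.hadoop.yarn.server.timeline.GenericObjectMapper;

    public class EventInfoRoundTripSketch {
      public static void main(String[] args) throws Exception {
        Map<String, Object> info = new HashMap<String, Object>();
        info.put("exitStatus", 0);          // hypothetical event info fields
        info.put("diagnostics", "ok");
        byte[] value = GenericObjectMapper.write(info); // stored as the event row's value
        Object back = GenericObjectMapper.read(value);  // what getEntityEvent deserializes
        System.out.println(back);
      }
    }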
+ */ + private static void addRelatedEntity(TimelineEntity entity, byte[] key, + int offset) throws IOException { + KeyParser kp = new KeyParser(key, offset); + String type = kp.getNextString(); + String id = kp.getNextString(); + entity.addRelatedEntity(type, id); + } + + /** + * Creates a reverse related entity key, serializing ENTITY_ENTRY_PREFIX + + * entity type + revstarttime + entity id + + * INVISIBLE_REVERSE_RELATED_ENTITIES_COLUMN + + * relatedentity type + relatedentity id. + */ + private static byte[] createReverseRelatedEntityKey(String entityId, + String entityType, byte[] revStartTime, String relatedEntityId, + String relatedEntityType) throws IOException { + return KeyBuilder.newInstance().add(ENTITY_ENTRY_PREFIX).add(entityType) + .add(revStartTime).add(entityId) + .add(INVISIBLE_REVERSE_RELATED_ENTITIES_COLUMN) + .add(relatedEntityType).add(relatedEntityId).getBytes(); + } + + /** + * Clears the cache to test reloading start times from leveldb (only for + * testing). + */ + @VisibleForTesting + void clearStartTimeCache() { + startTimeWriteCache.clear(); + startTimeReadCache.clear(); + } + + @VisibleForTesting + static int getStartTimeReadCacheSize(Configuration conf) { + return conf.getInt( + YarnConfiguration.TIMELINE_SERVICE_LEVELDB_START_TIME_READ_CACHE_SIZE, + YarnConfiguration. + DEFAULT_TIMELINE_SERVICE_LEVELDB_START_TIME_READ_CACHE_SIZE); + } + + @VisibleForTesting + static int getStartTimeWriteCacheSize(Configuration conf) { + return conf.getInt( + YarnConfiguration.TIMELINE_SERVICE_LEVELDB_START_TIME_WRITE_CACHE_SIZE, + YarnConfiguration. + DEFAULT_TIMELINE_SERVICE_LEVELDB_START_TIME_WRITE_CACHE_SIZE); + } + + // warning is suppressed to prevent eclipse from noting unclosed resource + @SuppressWarnings("resource") + @VisibleForTesting + List getEntityTypes() throws IOException { + DBIterator iterator = null; + try { + iterator = getDbIterator(false); + List entityTypes = new ArrayList(); + iterator.seek(ENTITY_ENTRY_PREFIX); + while (iterator.hasNext()) { + byte[] key = iterator.peekNext().getKey(); + if (key[0] != ENTITY_ENTRY_PREFIX[0]) { + break; + } + KeyParser kp = new KeyParser(key, + ENTITY_ENTRY_PREFIX.length); + String entityType = kp.getNextString(); + entityTypes.add(entityType); + byte[] lookupKey = KeyBuilder.newInstance().add(ENTITY_ENTRY_PREFIX) + .add(entityType).getBytesForLookup(); + if (lookupKey[lookupKey.length - 1] != 0x0) { + throw new IOException("Found unexpected end byte in lookup key"); + } + lookupKey[lookupKey.length - 1] = 0x1; + iterator.seek(lookupKey); + } + return entityTypes; + } finally { + IOUtils.cleanup(LOG, iterator); + } + } + + /** + * Finds all keys in the db that have a given prefix and deletes them on + * the given write batch. 
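getEntityTypes() above collects one entry per entity type by taking the type's lookup key, which ends in a 0x0 terminator, bumping that last byte to 0x1, and seeking, so the iterator jumps over every remaining key of that type in one step. The same idea with an in-memory sorted set, using a hypothetical 0x0-delimited key layout:

    import java.util.NavigableSet;
    import java.util.TreeSet;

    public class SkipTypeSketch {
      public static void main(String[] args) {
        NavigableSet<String> keys = new TreeSet<String>();
        keys.add("e\u0000TYPE_A\u0000id1");
        keys.add("e\u0000TYPE_A\u0000id2");
        keys.add("e\u0000TYPE_B\u0000id1");

        // The lookup key for TYPE_A ends with the 0x0 terminator; raising it
        // to 0x1 and seeking skips every TYPE_A row at once.
        String lookup = "e\u0000TYPE_A\u0000";
        String seekTarget = lookup.substring(0, lookup.length() - 1) + "\u0001";
        System.out.println(keys.ceiling(seekTarget)); // first TYPE_B key
      }
    }

In the real store the same jump is performed with DBIterator.seek() on the modified lookup key.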
+ */ + private void deleteKeysWithPrefix(WriteBatch writeBatch, byte[] prefix, + DBIterator iterator) { + for (iterator.seek(prefix); iterator.hasNext(); iterator.next()) { + byte[] key = iterator.peekNext().getKey(); + if (!prefixMatches(prefix, prefix.length, key)) { + break; + } + writeBatch.delete(key); + } + } + + @VisibleForTesting + boolean deleteNextEntity(String entityType, byte[] reverseTimestamp, + DBIterator iterator, DBIterator pfIterator, boolean seeked) + throws IOException { + WriteBatch writeBatch = null; + try { + KeyBuilder kb = KeyBuilder.newInstance().add(ENTITY_ENTRY_PREFIX) + .add(entityType); + byte[] typePrefix = kb.getBytesForLookup(); + kb.add(reverseTimestamp); + if (!seeked) { + iterator.seek(kb.getBytesForLookup()); + } + if (!iterator.hasNext()) { + return false; + } + byte[] entityKey = iterator.peekNext().getKey(); + if (!prefixMatches(typePrefix, typePrefix.length, entityKey)) { + return false; + } + + // read the start time and entity id from the current key + KeyParser kp = new KeyParser(entityKey, typePrefix.length + 8); + String entityId = kp.getNextString(); + int prefixlen = kp.getOffset(); + byte[] deletePrefix = new byte[prefixlen]; + System.arraycopy(entityKey, 0, deletePrefix, 0, prefixlen); + + writeBatch = db.createWriteBatch(); + + if (LOG.isDebugEnabled()) { + LOG.debug("Deleting entity type:" + entityType + " id:" + entityId); + } + // remove start time from cache and db + writeBatch.delete(createStartTimeLookupKey(entityId, entityType)); + EntityIdentifier entityIdentifier = + new EntityIdentifier(entityId, entityType); + startTimeReadCache.remove(entityIdentifier); + startTimeWriteCache.remove(entityIdentifier); + + // delete current entity + for (; iterator.hasNext(); iterator.next()) { + byte[] key = iterator.peekNext().getKey(); + if (!prefixMatches(entityKey, prefixlen, key)) { + break; + } + writeBatch.delete(key); + + if (key.length == prefixlen) { + continue; + } + if (key[prefixlen] == PRIMARY_FILTERS_COLUMN[0]) { + kp = new KeyParser(key, + prefixlen + PRIMARY_FILTERS_COLUMN.length); + String name = kp.getNextString(); + Object value = GenericObjectMapper.read(key, kp.getOffset()); + deleteKeysWithPrefix(writeBatch, addPrimaryFilterToKey(name, value, + deletePrefix), pfIterator); + if (LOG.isDebugEnabled()) { + LOG.debug("Deleting entity type:" + entityType + " id:" + + entityId + " primary filter entry " + name + " " + + value); + } + } else if (key[prefixlen] == RELATED_ENTITIES_COLUMN[0]) { + kp = new KeyParser(key, + prefixlen + RELATED_ENTITIES_COLUMN.length); + String type = kp.getNextString(); + String id = kp.getNextString(); + byte[] relatedEntityStartTime = getStartTime(id, type); + if (relatedEntityStartTime == null) { + LOG.warn("Found no start time for " + + "related entity " + id + " of type " + type + " while " + + "deleting " + entityId + " of type " + entityType); + continue; + } + writeBatch.delete(createReverseRelatedEntityKey(id, type, + relatedEntityStartTime, entityId, entityType)); + if (LOG.isDebugEnabled()) { + LOG.debug("Deleting entity type:" + entityType + " id:" + + entityId + " from invisible reverse related entity " + + "entry of type:" + type + " id:" + id); + } + } else if (key[prefixlen] == + INVISIBLE_REVERSE_RELATED_ENTITIES_COLUMN[0]) { + kp = new KeyParser(key, prefixlen + + INVISIBLE_REVERSE_RELATED_ENTITIES_COLUMN.length); + String type = kp.getNextString(); + String id = kp.getNextString(); + byte[] relatedEntityStartTime = getStartTime(id, type); + if (relatedEntityStartTime == null) { + 
LOG.warn("Found no start time for reverse " + + "related entity " + id + " of type " + type + " while " + + "deleting " + entityId + " of type " + entityType); + continue; + } + writeBatch.delete(createRelatedEntityKey(id, type, + relatedEntityStartTime, entityId, entityType)); + if (LOG.isDebugEnabled()) { + LOG.debug("Deleting entity type:" + entityType + " id:" + + entityId + " from related entity entry of type:" + + type + " id:" + id); + } + } + } + WriteOptions writeOptions = new WriteOptions(); + writeOptions.sync(true); + db.write(writeBatch, writeOptions); + return true; + } finally { + IOUtils.cleanup(LOG, writeBatch); + } + } + + /** + * Discards entities with start timestamp less than or equal to the given + * timestamp. + */ + @VisibleForTesting + void discardOldEntities(long timestamp) + throws IOException, InterruptedException { + byte[] reverseTimestamp = writeReverseOrderedLong(timestamp); + long totalCount = 0; + long t1 = System.currentTimeMillis(); + try { + List entityTypes = getEntityTypes(); + for (String entityType : entityTypes) { + DBIterator iterator = null; + DBIterator pfIterator = null; + long typeCount = 0; + try { + deleteLock.writeLock().lock(); + iterator = getDbIterator(false); + pfIterator = getDbIterator(false); + + if (deletionThread != null && deletionThread.isInterrupted()) { + throw new InterruptedException(); + } + boolean seeked = false; + while (deleteNextEntity(entityType, reverseTimestamp, iterator, + pfIterator, seeked)) { + typeCount++; + totalCount++; + seeked = true; + if (deletionThread != null && deletionThread.isInterrupted()) { + throw new InterruptedException(); + } + } + } catch (IOException e) { + LOG.error("Got IOException while deleting entities for type " + + entityType + ", continuing to next type", e); + } finally { + IOUtils.cleanup(LOG, iterator, pfIterator); + deleteLock.writeLock().unlock(); + if (typeCount > 0) { + LOG.info("Deleted " + typeCount + " entities of type " + + entityType); + } + } + } + } finally { + long t2 = System.currentTimeMillis(); + LOG.info("Discarded " + totalCount + " entities for timestamp " + + timestamp + " and earlier in " + (t2 - t1) / 1000.0 + " seconds"); + } + } + + @VisibleForTesting + DBIterator getDbIterator(boolean fillCache) { + ReadOptions readOptions = new ReadOptions(); + readOptions.fillCache(fillCache); + return db.iterator(readOptions); + } +} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/MemoryTimelineStore.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/MemoryTimelineStore.java new file mode 100644 index 0000000..b94711c --- /dev/null +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/MemoryTimelineStore.java @@ -0,0 +1,373 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.yarn.server.timeline; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.EnumSet; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.PriorityQueue; +import java.util.Set; +import java.util.SortedSet; +import java.util.TreeSet; + +import org.apache.hadoop.classification.InterfaceAudience.Private; +import org.apache.hadoop.classification.InterfaceStability.Unstable; +import org.apache.hadoop.service.AbstractService; +import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities; +import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity; +import org.apache.hadoop.yarn.api.records.timeline.TimelineEvent; +import org.apache.hadoop.yarn.api.records.timeline.TimelineEvents; +import org.apache.hadoop.yarn.api.records.timeline.TimelineEvents.EventsOfOneEntity; +import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse; +import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse.TimelinePutError; + +/** + * In-memory implementation of {@link TimelineStore}. This + * implementation is for test purpose only. If users improperly instantiate it, + * they may encounter reading and writing history data in different memory + * store. 
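Given the warning in the class javadoc above that this store is test-only, typical use is a unit test against a single instance. The sketch below assumes the standard AbstractService lifecycle shown in the class body that follows; the entity id, type and event type are made up.

    import java.util.EnumSet;

    import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities;
    import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
    import org.apache.hadoop.yarn.api.records.timeline.TimelineEvent;
    import org.apache.hadoop.yarn.conf.YarnConfiguration;
    import org.apache.hadoop.yarn.server.timeline.MemoryTimelineStore;
    import org.apache.hadoop.yarn.server.timeline.TimelineReader.Field;

    public class MemoryStoreUsageSketch {
      public static void main(String[] args) throws Exception {
        MemoryTimelineStore store = new MemoryTimelineStore();
        store.init(new YarnConfiguration());
        store.start();
        try {
          TimelineEntity entity = new TimelineEntity();
          entity.setEntityId("app_1");
          entity.setEntityType("TEST_TYPE");
          TimelineEvent event = new TimelineEvent();
          event.setEventType("CREATED");
          event.setTimestamp(System.currentTimeMillis());
          entity.addEvent(event); // no explicit start time: it falls back to this event
          TimelineEntities batch = new TimelineEntities();
          batch.addEntity(entity);
          System.out.println(store.put(batch).getErrors()); // expect an empty error list
          TimelineEntity stored =
              store.getEntity("app_1", "TEST_TYPE", EnumSet.allOf(Field.class));
          System.out.println(stored.getStartTime()); // earliest event timestamp
        } finally {
          store.stop();
        }
      }
    }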
+ * + */ +@Private +@Unstable +public class MemoryTimelineStore + extends AbstractService implements TimelineStore { + + private Map entities = + new HashMap(); + private Map entityInsertTimes = + new HashMap(); + + public MemoryTimelineStore() { + super(MemoryTimelineStore.class.getName()); + } + + @Override + public TimelineEntities getEntities(String entityType, Long limit, + Long windowStart, Long windowEnd, String fromId, Long fromTs, + NameValuePair primaryFilter, Collection secondaryFilters, + EnumSet fields) { + if (limit == null) { + limit = DEFAULT_LIMIT; + } + if (windowStart == null) { + windowStart = Long.MIN_VALUE; + } + if (windowEnd == null) { + windowEnd = Long.MAX_VALUE; + } + if (fields == null) { + fields = EnumSet.allOf(Field.class); + } + + Iterator entityIterator = null; + if (fromId != null) { + TimelineEntity firstEntity = entities.get(new EntityIdentifier(fromId, + entityType)); + if (firstEntity == null) { + return new TimelineEntities(); + } else { + entityIterator = new TreeSet(entities.values()) + .tailSet(firstEntity, true).iterator(); + } + } + if (entityIterator == null) { + entityIterator = new PriorityQueue(entities.values()) + .iterator(); + } + + List entitiesSelected = new ArrayList(); + while (entityIterator.hasNext()) { + TimelineEntity entity = entityIterator.next(); + if (entitiesSelected.size() >= limit) { + break; + } + if (!entity.getEntityType().equals(entityType)) { + continue; + } + if (entity.getStartTime() <= windowStart) { + continue; + } + if (entity.getStartTime() > windowEnd) { + continue; + } + if (fromTs != null && entityInsertTimes.get(new EntityIdentifier( + entity.getEntityId(), entity.getEntityType())) > fromTs) { + continue; + } + if (primaryFilter != null && + !matchPrimaryFilter(entity.getPrimaryFilters(), primaryFilter)) { + continue; + } + if (secondaryFilters != null) { // AND logic + boolean flag = true; + for (NameValuePair secondaryFilter : secondaryFilters) { + if (secondaryFilter != null && !matchPrimaryFilter( + entity.getPrimaryFilters(), secondaryFilter) && + !matchFilter(entity.getOtherInfo(), secondaryFilter)) { + flag = false; + break; + } + } + if (!flag) { + continue; + } + } + entitiesSelected.add(entity); + } + List entitiesToReturn = new ArrayList(); + for (TimelineEntity entitySelected : entitiesSelected) { + entitiesToReturn.add(maskFields(entitySelected, fields)); + } + Collections.sort(entitiesToReturn); + TimelineEntities entitiesWrapper = new TimelineEntities(); + entitiesWrapper.setEntities(entitiesToReturn); + return entitiesWrapper; + } + + @Override + public TimelineEntity getEntity(String entityId, String entityType, + EnumSet fieldsToRetrieve) { + if (fieldsToRetrieve == null) { + fieldsToRetrieve = EnumSet.allOf(Field.class); + } + TimelineEntity entity = entities.get(new EntityIdentifier(entityId, entityType)); + if (entity == null) { + return null; + } else { + return maskFields(entity, fieldsToRetrieve); + } + } + + @Override + public TimelineEvents getEntityTimelines(String entityType, + SortedSet entityIds, Long limit, Long windowStart, + Long windowEnd, + Set eventTypes) { + TimelineEvents allEvents = new TimelineEvents(); + if (entityIds == null) { + return allEvents; + } + if (limit == null) { + limit = DEFAULT_LIMIT; + } + if (windowStart == null) { + windowStart = Long.MIN_VALUE; + } + if (windowEnd == null) { + windowEnd = Long.MAX_VALUE; + } + for (String entityId : entityIds) { + EntityIdentifier entityID = new EntityIdentifier(entityId, entityType); + TimelineEntity entity = 
entities.get(entityID); + if (entity == null) { + continue; + } + EventsOfOneEntity events = new EventsOfOneEntity(); + events.setEntityId(entityId); + events.setEntityType(entityType); + for (TimelineEvent event : entity.getEvents()) { + if (events.getEvents().size() >= limit) { + break; + } + if (event.getTimestamp() <= windowStart) { + continue; + } + if (event.getTimestamp() > windowEnd) { + continue; + } + if (eventTypes != null && !eventTypes.contains(event.getEventType())) { + continue; + } + events.addEvent(event); + } + allEvents.addEvent(events); + } + return allEvents; + } + + @Override + public TimelinePutResponse put(TimelineEntities data) { + TimelinePutResponse response = new TimelinePutResponse(); + for (TimelineEntity entity : data.getEntities()) { + EntityIdentifier entityId = + new EntityIdentifier(entity.getEntityId(), entity.getEntityType()); + // store entity info in memory + TimelineEntity existingEntity = entities.get(entityId); + if (existingEntity == null) { + existingEntity = new TimelineEntity(); + existingEntity.setEntityId(entity.getEntityId()); + existingEntity.setEntityType(entity.getEntityType()); + existingEntity.setStartTime(entity.getStartTime()); + entities.put(entityId, existingEntity); + entityInsertTimes.put(entityId, System.currentTimeMillis()); + } + if (entity.getEvents() != null) { + if (existingEntity.getEvents() == null) { + existingEntity.setEvents(entity.getEvents()); + } else { + existingEntity.addEvents(entity.getEvents()); + } + Collections.sort(existingEntity.getEvents()); + } + // check startTime + if (existingEntity.getStartTime() == null) { + if (existingEntity.getEvents() == null + || existingEntity.getEvents().isEmpty()) { + TimelinePutError error = new TimelinePutError(); + error.setEntityId(entityId.getId()); + error.setEntityType(entityId.getType()); + error.setErrorCode(TimelinePutError.NO_START_TIME); + response.addError(error); + entities.remove(entityId); + entityInsertTimes.remove(entityId); + continue; + } else { + Long min = Long.MAX_VALUE; + for (TimelineEvent e : entity.getEvents()) { + if (min > e.getTimestamp()) { + min = e.getTimestamp(); + } + } + existingEntity.setStartTime(min); + } + } + if (entity.getPrimaryFilters() != null) { + if (existingEntity.getPrimaryFilters() == null) { + existingEntity.setPrimaryFilters(new HashMap>()); + } + for (Entry> pf : + entity.getPrimaryFilters().entrySet()) { + for (Object pfo : pf.getValue()) { + existingEntity.addPrimaryFilter(pf.getKey(), maybeConvert(pfo)); + } + } + } + if (entity.getOtherInfo() != null) { + if (existingEntity.getOtherInfo() == null) { + existingEntity.setOtherInfo(new HashMap()); + } + for (Entry info : entity.getOtherInfo().entrySet()) { + existingEntity.addOtherInfo(info.getKey(), + maybeConvert(info.getValue())); + } + } + // relate it to other entities + if (entity.getRelatedEntities() == null) { + continue; + } + for (Map.Entry> partRelatedEntities : entity + .getRelatedEntities().entrySet()) { + if (partRelatedEntities == null) { + continue; + } + for (String idStr : partRelatedEntities.getValue()) { + EntityIdentifier relatedEntityId = + new EntityIdentifier(idStr, partRelatedEntities.getKey()); + TimelineEntity relatedEntity = entities.get(relatedEntityId); + if (relatedEntity != null) { + relatedEntity.addRelatedEntity( + existingEntity.getEntityType(), existingEntity.getEntityId()); + } else { + relatedEntity = new TimelineEntity(); + relatedEntity.setEntityId(relatedEntityId.getId()); + relatedEntity.setEntityType(relatedEntityId.getType()); + 
relatedEntity.setStartTime(existingEntity.getStartTime()); + relatedEntity.addRelatedEntity(existingEntity.getEntityType(), + existingEntity.getEntityId()); + entities.put(relatedEntityId, relatedEntity); + entityInsertTimes.put(relatedEntityId, System.currentTimeMillis()); + } + } + } + } + return response; + } + + private static TimelineEntity maskFields( + TimelineEntity entity, EnumSet fields) { + // Conceal the fields that are not going to be exposed + TimelineEntity entityToReturn = new TimelineEntity(); + entityToReturn.setEntityId(entity.getEntityId()); + entityToReturn.setEntityType(entity.getEntityType()); + entityToReturn.setStartTime(entity.getStartTime()); + // Deep copy + if (fields.contains(Field.EVENTS)) { + entityToReturn.addEvents(entity.getEvents()); + } else if (fields.contains(Field.LAST_EVENT_ONLY)) { + entityToReturn.addEvent(entity.getEvents().get(0)); + } else { + entityToReturn.setEvents(null); + } + if (fields.contains(Field.RELATED_ENTITIES)) { + entityToReturn.addRelatedEntities(entity.getRelatedEntities()); + } else { + entityToReturn.setRelatedEntities(null); + } + if (fields.contains(Field.PRIMARY_FILTERS)) { + entityToReturn.addPrimaryFilters(entity.getPrimaryFilters()); + } else { + entityToReturn.setPrimaryFilters(null); + } + if (fields.contains(Field.OTHER_INFO)) { + entityToReturn.addOtherInfo(entity.getOtherInfo()); + } else { + entityToReturn.setOtherInfo(null); + } + return entityToReturn; + } + + private static boolean matchFilter(Map tags, + NameValuePair filter) { + Object value = tags.get(filter.getName()); + if (value == null) { // doesn't have the filter + return false; + } else if (!value.equals(filter.getValue())) { // doesn't match the filter + return false; + } + return true; + } + + private static boolean matchPrimaryFilter(Map> tags, + NameValuePair filter) { + Set value = tags.get(filter.getName()); + if (value == null) { // doesn't have the filter + return false; + } else { + return value.contains(filter.getValue()); + } + } + + private static Object maybeConvert(Object o) { + if (o instanceof Long) { + Long l = (Long)o; + if (l >= Integer.MIN_VALUE && l <= Integer.MAX_VALUE) { + return l.intValue(); + } + } + return o; + } + +} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/NameValuePair.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/NameValuePair.java new file mode 100644 index 0000000..1f17324 --- /dev/null +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/NameValuePair.java @@ -0,0 +1,59 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
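One detail worth noting from the MemoryTimelineStore.put() code above: maybeConvert() narrows Long values that fit in an int down to Integer before they are stored as primary-filter or other-info values, presumably so that stored values compare equal to the Integer values produced when the same data comes back through a JSON round trip. A standalone copy of that check:

    public class MaybeConvertSketch {
      // Mirrors MemoryTimelineStore.maybeConvert(): Longs in the int range
      // are down-converted to Integer, everything else passes through.
      static Object maybeConvert(Object o) {
        if (o instanceof Long) {
          Long l = (Long) o;
          if (l >= Integer.MIN_VALUE && l <= Integer.MAX_VALUE) {
            return l.intValue();
          }
        }
        return o;
      }

      public static void main(String[] args) {
        System.out.println(maybeConvert(42L).getClass().getSimpleName());            // Integer
        System.out.println(maybeConvert(Long.MAX_VALUE).getClass().getSimpleName()); // Long
      }
    }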
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.yarn.server.timeline; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.classification.InterfaceStability; + +/** + * A class holding a name and value pair, used for specifying filters in + * {@link TimelineReader}. + */ +@InterfaceAudience.Private +@InterfaceStability.Unstable +public class NameValuePair { + String name; + Object value; + + public NameValuePair(String name, Object value) { + this.name = name; + this.value = value; + } + + /** + * Get the name. + * @return The name. + */ + public String getName() { + + return name; + } + + /** + * Get the value. + * @return The value. + */ + public Object getValue() { + return value; + } + + @Override + public String toString() { + return "{ name: " + name + ", value: " + value + " }"; + } +} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/TimelineReader.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/TimelineReader.java new file mode 100644 index 0000000..23bca34 --- /dev/null +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/TimelineReader.java @@ -0,0 +1,155 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.yarn.server.timeline; + +import java.io.IOException; +import java.util.Collection; +import java.util.EnumSet; +import java.util.Set; +import java.util.SortedSet; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.classification.InterfaceStability; +import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities; +import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity; +import org.apache.hadoop.yarn.api.records.timeline.TimelineEvents; + +/** + * This interface is for retrieving timeline information. + */ +@InterfaceAudience.Private +@InterfaceStability.Unstable +public interface TimelineReader { + + /** + * Possible fields to retrieve for {@link #getEntities} and {@link #getEntity} + * . + */ + enum Field { + EVENTS, + RELATED_ENTITIES, + PRIMARY_FILTERS, + OTHER_INFO, + LAST_EVENT_ONLY + } + + /** + * Default limit for {@link #getEntities} and {@link #getEntityTimelines}. + */ + final long DEFAULT_LIMIT = 100; + + /** + * This method retrieves a list of entity information, {@link TimelineEntity}, + * sorted by the starting timestamp for the entity, descending. 
The starting + * timestamp of an entity is a timestamp specified by the client. If it is not + * explicitly specified, it will be chosen by the store to be the earliest + * timestamp of the events received in the first put for the entity. + * + * @param entityType + * The type of entities to return (required). + * @param limit + * A limit on the number of entities to return. If null, defaults to + * {@link #DEFAULT_LIMIT}. + * @param windowStart + * The earliest start timestamp to retrieve (exclusive). If null, + * defaults to retrieving all entities until the limit is reached. + * @param windowEnd + * The latest start timestamp to retrieve (inclusive). If null, + * defaults to {@link Long#MAX_VALUE} + * @param fromId + * If fromId is not null, retrieve entities earlier than and + * including the specified ID. If no start time is found for the + * specified ID, an empty list of entities will be returned. The + * windowEnd parameter will take precedence if the start time of this + * entity falls later than windowEnd. + * @param fromTs + * If fromTs is not null, ignore entities that were inserted into the + * store after the given timestamp. The entity's insert timestamp + * used for this comparison is the store's system time when the first + * put for the entity was received (not the entity's start time). + * @param primaryFilter + * Retrieves only entities that have the specified primary filter. If + * null, retrieves all entities. This is an indexed retrieval, and no + * entities that do not match the filter are scanned. + * @param secondaryFilters + * Retrieves only entities that have exact matches for all the + * specified filters in their primary filters or other info. This is + * not an indexed retrieval, so all entities are scanned but only + * those matching the filters are returned. + * @param fieldsToRetrieve + * Specifies which fields of the entity object to retrieve (see + * {@link Field}). If the set of fields contains + * {@link Field#LAST_EVENT_ONLY} and not {@link Field#EVENTS}, the + * most recent event for each entity is retrieved. If null, retrieves + * all fields. + * @return An {@link TimelineEntities} object. + * @throws IOException + */ + TimelineEntities getEntities(String entityType, + Long limit, Long windowStart, Long windowEnd, String fromId, Long fromTs, + NameValuePair primaryFilter, Collection secondaryFilters, + EnumSet fieldsToRetrieve) throws IOException; + + /** + * This method retrieves the entity information for a given entity. + * + * @param entityId + * The entity whose information will be retrieved. + * @param entityType + * The type of the entity. + * @param fieldsToRetrieve + * Specifies which fields of the entity object to retrieve (see + * {@link Field}). If the set of fields contains + * {@link Field#LAST_EVENT_ONLY} and not {@link Field#EVENTS}, the + * most recent event for each entity is retrieved. If null, retrieves + * all fields. + * @return An {@link TimelineEntity} object. + * @throws IOException + */ + TimelineEntity getEntity(String entityId, String entityType, EnumSet + fieldsToRetrieve) throws IOException; + + /** + * This method retrieves the events for a list of entities all of the same + * entity type. The events for each entity are sorted in order of their + * timestamps, descending. + * + * @param entityType + * The type of entities to retrieve events for. + * @param entityIds + * The entity IDs to retrieve events for. + * @param limit + * A limit on the number of events to return for each entity. 
If + * null, defaults to {@link #DEFAULT_LIMIT} events per entity. + * @param windowStart + * If not null, retrieves only events later than the given time + * (exclusive) + * @param windowEnd + * If not null, retrieves only events earlier than the given time + * (inclusive) + * @param eventTypes + * Restricts the events returned to the given types. If null, events + * of all types will be returned. + * @return An {@link TimelineEvents} object. + * @throws IOException + */ + TimelineEvents getEntityTimelines(String entityType, + SortedSet entityIds, Long limit, Long windowStart, + Long windowEnd, Set eventTypes) throws IOException; +} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/TimelineStore.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/TimelineStore.java new file mode 100644 index 0000000..3f6f4ab --- /dev/null +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/TimelineStore.java @@ -0,0 +1,42 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.yarn.server.timeline; + +import org.apache.hadoop.classification.InterfaceAudience.Private; +import org.apache.hadoop.classification.InterfaceStability.Unstable; +import org.apache.hadoop.service.Service; +import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity; + +@Private +@Unstable +public interface TimelineStore extends + Service, TimelineReader, TimelineWriter { + + /** + * The system filter which will be automatically added to a + * {@link TimelineEntity}'s primary filter section when storing the entity. + * The filter key is case sensitive. Users are supposed not to use the key + * reserved by the timeline system. + */ + @Private + enum SystemFilter { + ENTITY_OWNER + } + +} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/TimelineWriter.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/TimelineWriter.java new file mode 100644 index 0000000..a3e5aeb --- /dev/null +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/TimelineWriter.java @@ -0,0 +1,46 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
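Putting the TimelineReader contract above together, a read against a TimelineStore might look like the sketch below. The primary-filter name and value, the limit, and the field selection are illustrative, not values mandated by the interface.

    import java.io.IOException;
    import java.util.EnumSet;

    import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities;
    import org.apache.hadoop.yarn.server.timeline.NameValuePair;
    import org.apache.hadoop.yarn.server.timeline.TimelineReader.Field;
    import org.apache.hadoop.yarn.server.timeline.TimelineStore;

    public class GetEntitiesSketch {
      // Fetches up to 10 entities of the given type carrying the hypothetical
      // primary filter user=alice, newest start time first, returning only the
      // PRIMARY_FILTERS and EVENTS fields.
      static TimelineEntities recentEntities(TimelineStore store, String type)
          throws IOException {
        return store.getEntities(
            type,
            10L,          // limit
            null, null,   // windowStart, windowEnd: no start-time window
            null, null,   // fromId, fromTs: no paging anchor
            new NameValuePair("user", "alice"), // indexed primary filter
            null,         // no secondary filters
            EnumSet.of(Field.PRIMARY_FILTERS, Field.EVENTS));
      }
    }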
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.yarn.server.timeline; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.classification.InterfaceStability; +import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities; +import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse; + +import java.io.IOException; + +/** + * This interface is for storing timeline information. + */ +@InterfaceAudience.Private +@InterfaceStability.Unstable +public interface TimelineWriter { + + /** + * Stores entity information to the timeline store. Any errors occurring for + * individual put request objects will be reported in the response. + * + * @param data + * An {@link TimelineEntities} object. + * @return An {@link TimelinePutResponse} object. + * @throws IOException + */ + TimelinePutResponse put(TimelineEntities data) throws IOException; + +} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/package-info.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/package-info.java new file mode 100644 index 0000000..bf0fe79 --- /dev/null +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/package-info.java @@ -0,0 +1,20 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +@InterfaceAudience.Private +package org.apache.hadoop.yarn.server.timeline; +import org.apache.hadoop.classification.InterfaceAudience; diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/security/TimelineACLsManager.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/security/TimelineACLsManager.java new file mode 100644 index 0000000..848ad0b --- /dev/null +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/security/TimelineACLsManager.java @@ -0,0 +1,92 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.yarn.server.timeline.security; + +import java.io.IOException; +import java.util.Set; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.classification.InterfaceAudience.Private; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.security.UserGroupInformation; +import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity; +import org.apache.hadoop.yarn.exceptions.YarnException; +import org.apache.hadoop.yarn.security.AdminACLsManager; +import org.apache.hadoop.yarn.server.timeline.EntityIdentifier; +import org.apache.hadoop.yarn.server.timeline.TimelineStore.SystemFilter; + +import com.google.common.annotations.VisibleForTesting; + +/** + * TimelineACLsManager check the entity level timeline data access. 
+ */ +@Private +public class TimelineACLsManager { + + private static final Log LOG = LogFactory.getLog(TimelineACLsManager.class); + + private AdminACLsManager adminAclsManager; + + public TimelineACLsManager(Configuration conf) { + this.adminAclsManager = new AdminACLsManager(conf); + } + + public boolean checkAccess(UserGroupInformation callerUGI, + TimelineEntity entity) throws YarnException, IOException { + if (LOG.isDebugEnabled()) { + LOG.debug("Verifying the access of " + callerUGI.getShortUserName() + + " on the timeline entity " + + new EntityIdentifier(entity.getEntityId(), entity.getEntityType())); + } + + if (!adminAclsManager.areACLsEnabled()) { + return true; + } + + Set values = + entity.getPrimaryFilters().get( + SystemFilter.ENTITY_OWNER.toString()); + if (values == null || values.size() != 1) { + throw new YarnException("Owner information of the timeline entity " + + new EntityIdentifier(entity.getEntityId(), entity.getEntityType()) + + " is corrupted."); + } + String owner = values.iterator().next().toString(); + // TODO: Currently we just check the user is the admin or the timeline + // entity owner. In the future, we need to check whether the user is in the + // allowed user/group list + if (callerUGI != null + && (adminAclsManager.isAdmin(callerUGI) || + callerUGI.getShortUserName().equals(owner))) { + return true; + } + return false; + } + + @Private + @VisibleForTesting + public AdminACLsManager + setAdminACLsManager(AdminACLsManager adminAclsManager) { + AdminACLsManager oldAdminACLsManager = this.adminAclsManager; + this.adminAclsManager = adminAclsManager; + return oldAdminACLsManager; + } + +} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/security/TimelineAuthenticationFilter.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/security/TimelineAuthenticationFilter.java new file mode 100644 index 0000000..e6690a6 --- /dev/null +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/security/TimelineAuthenticationFilter.java @@ -0,0 +1,48 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
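Tying TimelineACLsManager above to the SystemFilter reserved in TimelineStore: the owner recorded under ENTITY_OWNER is what checkAccess() compares the caller against. In the hedged sketch below the owner filter is set by hand purely for illustration (in the real flow it is added when the entity is stored), and the user names and the admin ACL value are placeholders.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.security.UserGroupInformation;
    import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
    import org.apache.hadoop.yarn.conf.YarnConfiguration;
    import org.apache.hadoop.yarn.server.timeline.TimelineStore.SystemFilter;
    import org.apache.hadoop.yarn.server.timeline.security.TimelineACLsManager;

    public class AclCheckSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new YarnConfiguration();
        conf.setBoolean(YarnConfiguration.YARN_ACL_ENABLE, true);
        conf.set(YarnConfiguration.YARN_ADMIN_ACL, "yarn"); // restrict admins to "yarn"
        TimelineACLsManager aclsManager = new TimelineACLsManager(conf);

        TimelineEntity entity = new TimelineEntity();
        entity.setEntityId("app_1");
        entity.setEntityType("TEST_TYPE");
        // Simulate the reserved owner filter that is stamped on stored entities.
        entity.addPrimaryFilter(SystemFilter.ENTITY_OWNER.toString(), "alice");

        UserGroupInformation alice = UserGroupInformation.createRemoteUser("alice");
        UserGroupInformation bob = UserGroupInformation.createRemoteUser("bob");
        System.out.println(aclsManager.checkAccess(alice, entity)); // true: owner
        System.out.println(aclsManager.checkAccess(bob, entity));   // false: not owner, not admin
      }
    }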
+ */ + +package org.apache.hadoop.yarn.server.timeline.security; + +import java.util.Properties; + +import javax.servlet.FilterConfig; +import javax.servlet.ServletException; + +import org.apache.hadoop.classification.InterfaceAudience.Private; +import org.apache.hadoop.classification.InterfaceStability.Unstable; +import org.apache.hadoop.security.authentication.server.AuthenticationFilter; + +@Private +@Unstable +public class TimelineAuthenticationFilter extends AuthenticationFilter { + + @Override + protected Properties getConfiguration(String configPrefix, + FilterConfig filterConfig) throws ServletException { + // In yarn-site.xml, we can simply set type to "kerberos". However, we need + // to replace the name here to use the customized Kerberos + DT service + // instead of the standard Kerberos handler. + Properties properties = super.getConfiguration(configPrefix, filterConfig); + if (properties.getProperty(AUTH_TYPE).equals("kerberos")) { + properties.setProperty( + AUTH_TYPE, TimelineClientAuthenticationService.class.getName()); + } + return properties; + } + +} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/security/TimelineAuthenticationFilterInitializer.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/security/TimelineAuthenticationFilterInitializer.java new file mode 100644 index 0000000..53b27fb --- /dev/null +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/security/TimelineAuthenticationFilterInitializer.java @@ -0,0 +1,127 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.yarn.server.timeline.security; + +import java.io.FileReader; +import java.io.IOException; +import java.io.Reader; +import java.util.HashMap; +import java.util.Map; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.http.FilterContainer; +import org.apache.hadoop.http.FilterInitializer; +import org.apache.hadoop.http.HttpServer2; +import org.apache.hadoop.security.SecurityUtil; + +/** + *

+ * Initializes {@link TimelineAuthenticationFilter} which provides support for + * Kerberos HTTP SPNEGO authentication. + * + * It enables Kerberos HTTP SPNEGO plus delegation token authentication for the + * timeline server. + *
+ * Refer to the core-default.xml file, after the comment 'HTTP + * Authentication' for details on the configuration options. All related + * configuration properties have 'hadoop.http.authentication.' as prefix. + */ +public class TimelineAuthenticationFilterInitializer extends FilterInitializer { + + /** + * The configuration prefix of timeline Kerberos + DT authentication + */ + public static final String PREFIX = "yarn.timeline-service.http.authentication."; + + private static final String SIGNATURE_SECRET_FILE = + TimelineAuthenticationFilter.SIGNATURE_SECRET + ".file"; + + /** + *

+ * Initializes {@link TimelineAuthenticationFilter} + * + * Propagates to {@link TimelineAuthenticationFilter} configuration all YARN + * configuration properties prefixed with + * "yarn.timeline-service.http.authentication." + *
+ * + * @param container + * The filter container + * @param conf + * Configuration for run-time parameters + */ + @Override + public void initFilter(FilterContainer container, Configuration conf) { + Map filterConfig = new HashMap(); + + // setting the cookie path to root '/' so it is used for all resources. + filterConfig.put(TimelineAuthenticationFilter.COOKIE_PATH, "/"); + + for (Map.Entry entry : conf) { + String name = entry.getKey(); + if (name.startsWith(PREFIX)) { + String value = conf.get(name); + name = name.substring(PREFIX.length()); + filterConfig.put(name, value); + } + } + + String signatureSecretFile = filterConfig.get(SIGNATURE_SECRET_FILE); + if (signatureSecretFile != null) { + try { + StringBuilder secret = new StringBuilder(); + Reader reader = new FileReader(signatureSecretFile); + int c = reader.read(); + while (c > -1) { + secret.append((char) c); + c = reader.read(); + } + reader.close(); + filterConfig + .put(TimelineAuthenticationFilter.SIGNATURE_SECRET, + secret.toString()); + } catch (IOException ex) { + throw new RuntimeException( + "Could not read HTTP signature secret file: " + + signatureSecretFile); + } + } + + // Resolve _HOST into bind address + String bindAddress = conf.get(HttpServer2.BIND_ADDRESS); + String principal = + filterConfig.get(TimelineClientAuthenticationService.PRINCIPAL); + if (principal != null) { + try { + principal = SecurityUtil.getServerPrincipal(principal, bindAddress); + } catch (IOException ex) { + throw new RuntimeException( + "Could not resolve Kerberos principal name: " + ex.toString(), ex); + } + filterConfig.put(TimelineClientAuthenticationService.PRINCIPAL, + principal); + } + + container.addGlobalFilter("Timeline Authentication Filter", + TimelineAuthenticationFilter.class.getName(), + filterConfig); + } +} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/security/TimelineClientAuthenticationService.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/security/TimelineClientAuthenticationService.java new file mode 100644 index 0000000..9581e82 --- /dev/null +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/security/TimelineClientAuthenticationService.java @@ -0,0 +1,236 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
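The initializer above copies every property under the yarn.timeline-service.http.authentication. prefix into the filter configuration, loads the signature secret from the file named by the signature.secret.file suffix, and resolves _HOST in the Kerberos principal against the web app bind address. A sketch of a matching configuration, written as Java for brevity; the principal, keytab and secret-file paths are placeholders, and the property suffixes are the standard hadoop-auth AuthenticationFilter names rather than anything defined in this patch.

    import org.apache.hadoop.conf.Configuration;

    public class TimelineAuthConfigSketch {
      // Illustrative only: properties under the documented prefix that the
      // initializer strips and hands to the authentication filter.
      public static Configuration secureTimelineConf() {
        Configuration conf = new Configuration();
        conf.set("yarn.timeline-service.http.authentication.type", "kerberos");
        conf.set("yarn.timeline-service.http.authentication.kerberos.principal",
            "HTTP/_HOST@EXAMPLE.COM");   // _HOST is resolved to the bind address
        conf.set("yarn.timeline-service.http.authentication.kerberos.keytab",
            "/etc/security/keytabs/spnego.service.keytab");
        conf.set("yarn.timeline-service.http.authentication.signature.secret.file",
            "/etc/security/http_secret");
        return conf;
      }
    }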
+ */ + +package org.apache.hadoop.yarn.server.timeline.security; + +import java.io.IOException; +import java.io.Writer; +import java.text.MessageFormat; +import java.util.HashSet; +import java.util.Set; + +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; +import javax.ws.rs.core.MediaType; + +import org.apache.hadoop.classification.InterfaceAudience.Private; +import org.apache.hadoop.classification.InterfaceStability.Unstable; +import org.apache.hadoop.security.UserGroupInformation; +import org.apache.hadoop.security.authentication.client.AuthenticationException; +import org.apache.hadoop.security.authentication.server.AuthenticationToken; +import org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler; +import org.apache.hadoop.security.token.Token; +import org.apache.hadoop.yarn.api.records.timeline.TimelineDelegationTokenResponse; +import org.apache.hadoop.yarn.security.client.TimelineDelegationTokenIdentifier; +import org.apache.hadoop.yarn.security.client.TimelineDelegationTokenOperation; +import org.apache.hadoop.yarn.security.client.TimelineAuthenticationConsts; +import org.apache.hadoop.yarn.server.applicationhistoryservice.webapp.AHSWebApp; +import org.apache.hadoop.yarn.webapp.YarnJacksonJaxbJsonProvider; +import org.codehaus.jackson.map.ObjectMapper; + +/** + * Server side AuthenticationHandler that authenticates requests + * using the incoming delegation token as a 'delegation' query string parameter. + *
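+ * The token value is expected in the form produced by {@link Token#encodeToUrlString()}.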

+ * If not delegation token is present in the request it delegates to the + * {@link KerberosAuthenticationHandler} + */ +@Private +@Unstable +public class TimelineClientAuthenticationService + extends KerberosAuthenticationHandler { + + public static final String TYPE = "kerberos-dt"; + private static final Set DELEGATION_TOKEN_OPS = new HashSet(); + private static final String OP_PARAM = "op"; + private static final String ENTER = System.getProperty("line.separator"); + + private ObjectMapper mapper; + + static { + DELEGATION_TOKEN_OPS.add( + TimelineDelegationTokenOperation.GETDELEGATIONTOKEN.toString()); + DELEGATION_TOKEN_OPS.add( + TimelineDelegationTokenOperation.RENEWDELEGATIONTOKEN.toString()); + DELEGATION_TOKEN_OPS.add( + TimelineDelegationTokenOperation.CANCELDELEGATIONTOKEN.toString()); + } + + public TimelineClientAuthenticationService() { + super(); + mapper = new ObjectMapper(); + YarnJacksonJaxbJsonProvider.configObjectMapper(mapper); + } + + /** + * Returns authentication type of the handler. + * + * @return delegationtoken-kerberos + */ + @Override + public String getType() { + return TYPE; + } + + @Override + public boolean managementOperation(AuthenticationToken token, + HttpServletRequest request, HttpServletResponse response) + throws IOException, AuthenticationException { + boolean requestContinues = true; + String op = request.getParameter(OP_PARAM); + op = (op != null) ? op.toUpperCase() : null; + if (DELEGATION_TOKEN_OPS.contains(op) && + !request.getMethod().equals("OPTIONS")) { + TimelineDelegationTokenOperation dtOp = + TimelineDelegationTokenOperation.valueOf(op); + if (dtOp.getHttpMethod().equals(request.getMethod())) { + if (dtOp.requiresKerberosCredentials() && token == null) { + response.sendError(HttpServletResponse.SC_UNAUTHORIZED, + MessageFormat.format( + "Operation [{0}] requires SPNEGO authentication established", + dtOp)); + requestContinues = false; + } else { + TimelineDelegationTokenSecretManagerService secretManager = + AHSWebApp.getInstance() + .getTimelineDelegationTokenSecretManagerService(); + try { + TimelineDelegationTokenResponse res = null; + switch (dtOp) { + case GETDELEGATIONTOKEN: + UserGroupInformation ownerUGI = + UserGroupInformation.createRemoteUser(token.getUserName()); + String renewerParam = + request + .getParameter(TimelineAuthenticationConsts.RENEWER_PARAM); + if (renewerParam == null) { + renewerParam = token.getUserName(); + } + Token dToken = + secretManager.createToken(ownerUGI, renewerParam); + res = new TimelineDelegationTokenResponse(); + res.setType(TimelineAuthenticationConsts.DELEGATION_TOKEN_URL); + res.setContent(dToken.encodeToUrlString()); + break; + case RENEWDELEGATIONTOKEN: + case CANCELDELEGATIONTOKEN: + String tokenParam = + request + .getParameter(TimelineAuthenticationConsts.TOKEN_PARAM); + if (tokenParam == null) { + response.sendError(HttpServletResponse.SC_BAD_REQUEST, + MessageFormat + .format( + "Operation [{0}] requires the parameter [{1}]", + dtOp, + TimelineAuthenticationConsts.TOKEN_PARAM)); + requestContinues = false; + } else { + if (dtOp == TimelineDelegationTokenOperation.CANCELDELEGATIONTOKEN) { + Token dt = + new Token(); + dt.decodeFromUrlString(tokenParam); + secretManager.cancelToken(dt, token.getUserName()); + } else { + Token dt = + new Token(); + dt.decodeFromUrlString(tokenParam); + long expirationTime = + secretManager.renewToken(dt, token.getUserName()); + res = new TimelineDelegationTokenResponse(); + res.setType(TimelineAuthenticationConsts.DELEGATION_TOKEN_EXPIRATION_TIME); + 
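+ // For a renew request, the response content is the token's new expiration time.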
res.setContent(expirationTime); + } + } + break; + } + if (requestContinues) { + response.setStatus(HttpServletResponse.SC_OK); + if (res != null) { + response.setContentType(MediaType.APPLICATION_JSON); + Writer writer = response.getWriter(); + mapper.writeValue(writer, res); + writer.write(ENTER); + writer.flush(); + + } + requestContinues = false; + } + } catch (IOException e) { + throw new AuthenticationException(e.toString(), e); + } + } + } else { + response + .sendError( + HttpServletResponse.SC_BAD_REQUEST, + MessageFormat + .format( + "Wrong HTTP method [{0}] for operation [{1}], it should be [{2}]", + request.getMethod(), dtOp, dtOp.getHttpMethod())); + requestContinues = false; + } + } + return requestContinues; + } + + /** + * Authenticates a request looking for the delegation + * query-string parameter and verifying it is a valid token. If there is not + * delegation query-string parameter, it delegates the + * authentication to the {@link KerberosAuthenticationHandler} unless it is + * disabled. + * + * @param request + * the HTTP client request. + * @param response + * the HTTP client response. + * + * @return the authentication token for the authenticated request. + * @throws IOException + * thrown if an IO error occurred. + * @throws AuthenticationException + * thrown if the authentication failed. + */ + @Override + public AuthenticationToken authenticate(HttpServletRequest request, + HttpServletResponse response) + throws IOException, AuthenticationException { + AuthenticationToken token; + String delegationParam = + request + .getParameter(TimelineAuthenticationConsts.DELEGATION_PARAM); + if (delegationParam != null) { + Token dt = + new Token(); + dt.decodeFromUrlString(delegationParam); + TimelineDelegationTokenSecretManagerService secretManager = + AHSWebApp.getInstance() + .getTimelineDelegationTokenSecretManagerService(); + UserGroupInformation ugi = secretManager.verifyToken(dt); + final String shortName = ugi.getShortUserName(); + // creating a ephemeral token + token = new AuthenticationToken(shortName, ugi.getUserName(), getType()); + token.setExpires(0); + } else { + token = super.authenticate(request, response); + } + return token; + } + +} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/security/TimelineDelegationTokenSecretManagerService.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/security/TimelineDelegationTokenSecretManagerService.java new file mode 100644 index 0000000..1539513 --- /dev/null +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/security/TimelineDelegationTokenSecretManagerService.java @@ -0,0 +1,180 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.yarn.server.timeline.security; + +import java.io.ByteArrayInputStream; +import java.io.DataInputStream; +import java.io.IOException; +import java.net.InetSocketAddress; + +import org.apache.hadoop.classification.InterfaceAudience.Private; +import org.apache.hadoop.classification.InterfaceStability.Unstable; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.io.Text; +import org.apache.hadoop.security.SecurityUtil; +import org.apache.hadoop.security.UserGroupInformation; +import org.apache.hadoop.security.token.Token; +import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecretManager; +import org.apache.hadoop.service.AbstractService; +import org.apache.hadoop.yarn.conf.YarnConfiguration; +import org.apache.hadoop.yarn.security.client.TimelineDelegationTokenIdentifier; +import org.apache.hadoop.yarn.util.timeline.TimelineUtils; + +/** + * The service wrapper of {@link TimelineDelegationTokenSecretManager} + */ +@Private +@Unstable +public class TimelineDelegationTokenSecretManagerService extends AbstractService { + + private TimelineDelegationTokenSecretManager secretManager = null; + private InetSocketAddress serviceAddr = null; + + public TimelineDelegationTokenSecretManagerService() { + super(TimelineDelegationTokenSecretManagerService.class.getName()); + } + + @Override + protected void serviceInit(Configuration conf) throws Exception { + long secretKeyInterval = + conf.getLong(YarnConfiguration.DELEGATION_KEY_UPDATE_INTERVAL_KEY, + YarnConfiguration.DELEGATION_KEY_UPDATE_INTERVAL_DEFAULT); + long tokenMaxLifetime = + conf.getLong(YarnConfiguration.DELEGATION_TOKEN_MAX_LIFETIME_KEY, + YarnConfiguration.DELEGATION_TOKEN_MAX_LIFETIME_DEFAULT); + long tokenRenewInterval = + conf.getLong(YarnConfiguration.DELEGATION_TOKEN_RENEW_INTERVAL_KEY, + YarnConfiguration.DELEGATION_TOKEN_RENEW_INTERVAL_DEFAULT); + secretManager = new TimelineDelegationTokenSecretManager(secretKeyInterval, + tokenMaxLifetime, tokenRenewInterval, + 3600000); + secretManager.startThreads(); + + serviceAddr = TimelineUtils.getTimelineTokenServiceAddress(getConfig()); + super.init(conf); + } + + @Override + protected void serviceStop() throws Exception { + secretManager.stopThreads(); + super.stop(); + } + + /** + * Creates a delegation token. + * + * @param ugi UGI creating the token. + * @param renewer token renewer. + * @return new delegation token. + * @throws IOException thrown if the token could not be created. + */ + public Token createToken( + UserGroupInformation ugi, String renewer) throws IOException { + renewer = (renewer == null) ? 
ugi.getShortUserName() : renewer; + String user = ugi.getUserName(); + Text owner = new Text(user); + Text realUser = null; + if (ugi.getRealUser() != null) { + realUser = new Text(ugi.getRealUser().getUserName()); + } + TimelineDelegationTokenIdentifier tokenIdentifier = + new TimelineDelegationTokenIdentifier(owner, new Text(renewer), realUser); + Token token = + new Token(tokenIdentifier, secretManager); + SecurityUtil.setTokenService(token, serviceAddr); + return token; + } + + /** + * Renews a delegation token. + * + * @param token delegation token to renew. + * @param renewer token renewer. + * @throws IOException thrown if the token could not be renewed. + */ + public long renewToken(Token token, + String renewer) throws IOException { + return secretManager.renewToken(token, renewer); + } + + /** + * Cancels a delegation token. + * + * @param token delegation token to cancel. + * @param canceler token canceler. + * @throws IOException thrown if the token could not be canceled. + */ + public void cancelToken(Token token, + String canceler) throws IOException { + secretManager.cancelToken(token, canceler); + } + + /** + * Verifies a delegation token. + * + * @param token delegation token to verify. + * @return the UGI for the token. + * @throws IOException thrown if the token could not be verified. + */ + public UserGroupInformation verifyToken(Token token) + throws IOException { + ByteArrayInputStream buf = new ByteArrayInputStream(token.getIdentifier()); + DataInputStream dis = new DataInputStream(buf); + TimelineDelegationTokenIdentifier id = new TimelineDelegationTokenIdentifier(); + try { + id.readFields(dis); + secretManager.verifyToken(id, token.getPassword()); + } finally { + dis.close(); + } + return id.getUser(); + } + + /** + * Create a timeline secret manager + * + * @param delegationKeyUpdateInterval + * the number of seconds for rolling new secret keys. + * @param delegationTokenMaxLifetime + * the maximum lifetime of the delegation tokens + * @param delegationTokenRenewInterval + * how often the tokens must be renewed + * @param delegationTokenRemoverScanInterval + * how often the tokens are scanned for expired tokens + */ + @Private + @Unstable + public static class TimelineDelegationTokenSecretManager extends + AbstractDelegationTokenSecretManager { + + public TimelineDelegationTokenSecretManager(long delegationKeyUpdateInterval, + long delegationTokenMaxLifetime, long delegationTokenRenewInterval, + long delegationTokenRemoverScanInterval) { + super(delegationKeyUpdateInterval, delegationTokenMaxLifetime, + delegationTokenRenewInterval, delegationTokenRemoverScanInterval); + } + + @Override + public TimelineDelegationTokenIdentifier createIdentifier() { + return new TimelineDelegationTokenIdentifier(); + } + + } + +} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/webapp/TimelineWebServices.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/webapp/TimelineWebServices.java new file mode 100644 index 0000000..8c7762d --- /dev/null +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/webapp/TimelineWebServices.java @@ -0,0 +1,539 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.yarn.server.timeline.webapp; + +import static org.apache.hadoop.yarn.util.StringHelper.CSV_JOINER; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.EnumSet; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.Set; +import java.util.SortedSet; +import java.util.TreeSet; + +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; +import javax.ws.rs.Consumes; +import javax.ws.rs.GET; +import javax.ws.rs.POST; +import javax.ws.rs.Path; +import javax.ws.rs.PathParam; +import javax.ws.rs.Produces; +import javax.ws.rs.QueryParam; +import javax.ws.rs.WebApplicationException; +import javax.ws.rs.core.Context; +import javax.ws.rs.core.MediaType; +import javax.ws.rs.core.Response; +import javax.xml.bind.annotation.XmlAccessType; +import javax.xml.bind.annotation.XmlAccessorType; +import javax.xml.bind.annotation.XmlElement; +import javax.xml.bind.annotation.XmlRootElement; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.classification.InterfaceAudience.Public; +import org.apache.hadoop.classification.InterfaceStability.Unstable; +import org.apache.hadoop.security.UserGroupInformation; +import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities; +import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity; +import org.apache.hadoop.yarn.api.records.timeline.TimelineEvents; +import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse; +import org.apache.hadoop.yarn.exceptions.YarnException; +import org.apache.hadoop.yarn.server.timeline.EntityIdentifier; +import org.apache.hadoop.yarn.server.timeline.GenericObjectMapper; +import org.apache.hadoop.yarn.server.timeline.NameValuePair; +import org.apache.hadoop.yarn.server.timeline.TimelineStore; +import org.apache.hadoop.yarn.server.timeline.TimelineReader.Field; +import org.apache.hadoop.yarn.server.timeline.security.TimelineACLsManager; +import org.apache.hadoop.yarn.util.timeline.TimelineUtils; +import org.apache.hadoop.yarn.webapp.BadRequestException; +import org.apache.hadoop.yarn.webapp.NotFoundException; + +import com.google.inject.Inject; +import com.google.inject.Singleton; + +@Singleton +@Path("/ws/v1/timeline") +//TODO: support XML serialization/deserialization +public class TimelineWebServices { + + private static final Log LOG = LogFactory.getLog(TimelineWebServices.class); + + private TimelineStore store; + private TimelineACLsManager timelineACLsManager; + + @Inject + public TimelineWebServices(TimelineStore store, + TimelineACLsManager timelineACLsManager) { + this.store = store; + this.timelineACLsManager = timelineACLsManager; + } + + @XmlRootElement(name = "about") + 
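+ // Simple response bean for the "about" resource; the getter below is serialized as the "About" field.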
@XmlAccessorType(XmlAccessType.NONE) + @Public + @Unstable + public static class AboutInfo { + + private String about; + + public AboutInfo() { + + } + + public AboutInfo(String about) { + this.about = about; + } + + @XmlElement(name = "About") + public String getAbout() { + return about; + } + + public void setAbout(String about) { + this.about = about; + } + + } + + /** + * Return the description of the timeline web services. + */ + @GET + @Produces({ MediaType.APPLICATION_JSON /* , MediaType.APPLICATION_XML */}) + public AboutInfo about( + @Context HttpServletRequest req, + @Context HttpServletResponse res) { + init(res); + return new AboutInfo("Timeline API"); + } + + /** + * Return a list of entities that match the given parameters. + */ + @GET + @Path("/{entityType}") + @Produces({ MediaType.APPLICATION_JSON /* , MediaType.APPLICATION_XML */}) + public TimelineEntities getEntities( + @Context HttpServletRequest req, + @Context HttpServletResponse res, + @PathParam("entityType") String entityType, + @QueryParam("primaryFilter") String primaryFilter, + @QueryParam("secondaryFilter") String secondaryFilter, + @QueryParam("windowStart") String windowStart, + @QueryParam("windowEnd") String windowEnd, + @QueryParam("fromId") String fromId, + @QueryParam("fromTs") String fromTs, + @QueryParam("limit") String limit, + @QueryParam("fields") String fields) { + init(res); + TimelineEntities entities = null; + try { + EnumSet fieldEnums = parseFieldsStr(fields, ","); + boolean modified = extendFields(fieldEnums); + UserGroupInformation callerUGI = getUser(req); + entities = store.getEntities( + parseStr(entityType), + parseLongStr(limit), + parseLongStr(windowStart), + parseLongStr(windowEnd), + parseStr(fromId), + parseLongStr(fromTs), + parsePairStr(primaryFilter, ":"), + parsePairsStr(secondaryFilter, ",", ":"), + fieldEnums); + if (entities != null) { + Iterator entitiesItr = + entities.getEntities().iterator(); + while (entitiesItr.hasNext()) { + TimelineEntity entity = entitiesItr.next(); + try { + // check ACLs + if (!timelineACLsManager.checkAccess(callerUGI, entity)) { + entitiesItr.remove(); + } else { + // clean up system data + if (modified) { + entity.setPrimaryFilters(null); + } else { + cleanupOwnerInfo(entity); + } + } + } catch (YarnException e) { + LOG.error("Error when verifying access for user " + callerUGI + + " on the events of the timeline entity " + + new EntityIdentifier(entity.getEntityId(), + entity.getEntityType()), e); + entitiesItr.remove(); + } + } + } + } catch (NumberFormatException e) { + throw new BadRequestException( + "windowStart, windowEnd or limit is not a numeric value."); + } catch (IllegalArgumentException e) { + throw new BadRequestException("requested invalid field."); + } catch (IOException e) { + LOG.error("Error getting entities", e); + throw new WebApplicationException(e, + Response.Status.INTERNAL_SERVER_ERROR); + } + if (entities == null) { + return new TimelineEntities(); + } + return entities; + } + + /** + * Return a single entity of the given entity type and Id. 
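+ * A NotFoundException (HTTP 404) is raised when the entity does not exist or
+ * the caller is not allowed to view it.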
+ */ + @GET + @Path("/{entityType}/{entityId}") + @Produces({ MediaType.APPLICATION_JSON /* , MediaType.APPLICATION_XML */}) + public TimelineEntity getEntity( + @Context HttpServletRequest req, + @Context HttpServletResponse res, + @PathParam("entityType") String entityType, + @PathParam("entityId") String entityId, + @QueryParam("fields") String fields) { + init(res); + TimelineEntity entity = null; + try { + EnumSet fieldEnums = parseFieldsStr(fields, ","); + boolean modified = extendFields(fieldEnums); + entity = + store.getEntity(parseStr(entityId), parseStr(entityType), + fieldEnums); + if (entity != null) { + // check ACLs + UserGroupInformation callerUGI = getUser(req); + if (!timelineACLsManager.checkAccess(callerUGI, entity)) { + entity = null; + } else { + // clean up the system data + if (modified) { + entity.setPrimaryFilters(null); + } else { + cleanupOwnerInfo(entity); + } + } + } + } catch (IllegalArgumentException e) { + throw new BadRequestException( + "requested invalid field."); + } catch (IOException e) { + LOG.error("Error getting entity", e); + throw new WebApplicationException(e, + Response.Status.INTERNAL_SERVER_ERROR); + } catch (YarnException e) { + LOG.error("Error getting entity", e); + throw new WebApplicationException(e, + Response.Status.INTERNAL_SERVER_ERROR); + } + if (entity == null) { + throw new NotFoundException("Timeline entity " + + new EntityIdentifier(parseStr(entityId), parseStr(entityType)) + + " is not found"); + } + return entity; + } + + /** + * Return the events that match the given parameters. + */ + @GET + @Path("/{entityType}/events") + @Produces({ MediaType.APPLICATION_JSON /* , MediaType.APPLICATION_XML */}) + public TimelineEvents getEvents( + @Context HttpServletRequest req, + @Context HttpServletResponse res, + @PathParam("entityType") String entityType, + @QueryParam("entityId") String entityId, + @QueryParam("eventType") String eventType, + @QueryParam("windowStart") String windowStart, + @QueryParam("windowEnd") String windowEnd, + @QueryParam("limit") String limit) { + init(res); + TimelineEvents events = null; + try { + UserGroupInformation callerUGI = getUser(req); + events = store.getEntityTimelines( + parseStr(entityType), + parseArrayStr(entityId, ","), + parseLongStr(limit), + parseLongStr(windowStart), + parseLongStr(windowEnd), + parseArrayStr(eventType, ",")); + if (events != null) { + Iterator eventsItr = + events.getAllEvents().iterator(); + while (eventsItr.hasNext()) { + TimelineEvents.EventsOfOneEntity eventsOfOneEntity = eventsItr.next(); + try { + TimelineEntity entity = store.getEntity( + eventsOfOneEntity.getEntityId(), + eventsOfOneEntity.getEntityType(), + EnumSet.of(Field.PRIMARY_FILTERS)); + // check ACLs + if (!timelineACLsManager.checkAccess(callerUGI, entity)) { + eventsItr.remove(); + } + } catch (Exception e) { + LOG.error("Error when verifying access for user " + callerUGI + + " on the events of the timeline entity " + + new EntityIdentifier(eventsOfOneEntity.getEntityId(), + eventsOfOneEntity.getEntityType()), e); + eventsItr.remove(); + } + } + } + } catch (NumberFormatException e) { + throw new BadRequestException( + "windowStart, windowEnd or limit is not a numeric value."); + } catch (IOException e) { + LOG.error("Error getting entity timelines", e); + throw new WebApplicationException(e, + Response.Status.INTERNAL_SERVER_ERROR); + } + if (events == null) { + return new TimelineEvents(); + } + return events; + } + + /** + * Store the given entities into the timeline store, and return the errors + * 
that happen during storing. + */ + @POST + @Consumes({ MediaType.APPLICATION_JSON /* , MediaType.APPLICATION_XML */}) + public TimelinePutResponse postEntities( + @Context HttpServletRequest req, + @Context HttpServletResponse res, + TimelineEntities entities) { + init(res); + if (entities == null) { + return new TimelinePutResponse(); + } + UserGroupInformation callerUGI = getUser(req); + try { + List entityIDs = new ArrayList(); + TimelineEntities entitiesToPut = new TimelineEntities(); + List errors = + new ArrayList(); + for (TimelineEntity entity : entities.getEntities()) { + EntityIdentifier entityID = + new EntityIdentifier(entity.getEntityId(), entity.getEntityType()); + + // check if there is existing entity + TimelineEntity existingEntity = null; + try { + existingEntity = + store.getEntity(entityID.getId(), entityID.getType(), + EnumSet.of(Field.PRIMARY_FILTERS)); + if (existingEntity != null + && !timelineACLsManager.checkAccess(callerUGI, existingEntity)) { + throw new YarnException("The timeline entity " + entityID + + " was not put by " + callerUGI + " before"); + } + } catch (Exception e) { + // Skip the entity which already exists and was put by others + LOG.warn("Skip the timeline entity: " + entityID + ", because " + + e.getMessage()); + TimelinePutResponse.TimelinePutError error = + new TimelinePutResponse.TimelinePutError(); + error.setEntityId(entityID.getId()); + error.setEntityType(entityID.getType()); + error.setErrorCode( + TimelinePutResponse.TimelinePutError.ACCESS_DENIED); + errors.add(error); + continue; + } + + // inject owner information for the access check if this is the first + // time to post the entity, in case it's the admin who is updating + // the timeline data. + try { + if (existingEntity == null) { + injectOwnerInfo(entity, + callerUGI == null ? 
"" : callerUGI.getShortUserName()); + } + } catch (YarnException e) { + // Skip the entity which messes up the primary filter and record the + // error + LOG.warn("Skip the timeline entity: " + entityID + ", because " + + e.getMessage()); + TimelinePutResponse.TimelinePutError error = + new TimelinePutResponse.TimelinePutError(); + error.setEntityId(entityID.getId()); + error.setEntityType(entityID.getType()); + error.setErrorCode( + TimelinePutResponse.TimelinePutError.SYSTEM_FILTER_CONFLICT); + errors.add(error); + continue; + } + + entityIDs.add(entityID); + entitiesToPut.addEntity(entity); + if (LOG.isDebugEnabled()) { + LOG.debug("Storing the entity " + entityID + ", JSON-style content: " + + TimelineUtils.dumpTimelineRecordtoJSON(entity)); + } + } + if (LOG.isDebugEnabled()) { + LOG.debug("Storing entities: " + CSV_JOINER.join(entityIDs)); + } + TimelinePutResponse response = store.put(entitiesToPut); + // add the errors of timeline system filter key conflict + response.addErrors(errors); + return response; + } catch (IOException e) { + LOG.error("Error putting entities", e); + throw new WebApplicationException(e, + Response.Status.INTERNAL_SERVER_ERROR); + } + } + + private void init(HttpServletResponse response) { + response.setContentType(null); + } + + private static SortedSet parseArrayStr(String str, String delimiter) { + if (str == null) { + return null; + } + SortedSet strSet = new TreeSet(); + String[] strs = str.split(delimiter); + for (String aStr : strs) { + strSet.add(aStr.trim()); + } + return strSet; + } + + private static NameValuePair parsePairStr(String str, String delimiter) { + if (str == null) { + return null; + } + String[] strs = str.split(delimiter, 2); + try { + return new NameValuePair(strs[0].trim(), + GenericObjectMapper.OBJECT_READER.readValue(strs[1].trim())); + } catch (Exception e) { + // didn't work as an Object, keep it as a String + return new NameValuePair(strs[0].trim(), strs[1].trim()); + } + } + + private static Collection parsePairsStr( + String str, String aDelimiter, String pDelimiter) { + if (str == null) { + return null; + } + String[] strs = str.split(aDelimiter); + Set pairs = new HashSet(); + for (String aStr : strs) { + pairs.add(parsePairStr(aStr, pDelimiter)); + } + return pairs; + } + + private static EnumSet parseFieldsStr(String str, String delimiter) { + if (str == null) { + return null; + } + String[] strs = str.split(delimiter); + List fieldList = new ArrayList(); + for (String s : strs) { + s = s.trim().toUpperCase(); + if (s.equals("EVENTS")) { + fieldList.add(Field.EVENTS); + } else if (s.equals("LASTEVENTONLY")) { + fieldList.add(Field.LAST_EVENT_ONLY); + } else if (s.equals("RELATEDENTITIES")) { + fieldList.add(Field.RELATED_ENTITIES); + } else if (s.equals("PRIMARYFILTERS")) { + fieldList.add(Field.PRIMARY_FILTERS); + } else if (s.equals("OTHERINFO")) { + fieldList.add(Field.OTHER_INFO); + } else { + throw new IllegalArgumentException("Requested nonexistent field " + s); + } + } + if (fieldList.size() == 0) { + return null; + } + Field f1 = fieldList.remove(fieldList.size() - 1); + if (fieldList.size() == 0) { + return EnumSet.of(f1); + } else { + return EnumSet.of(f1, fieldList.toArray(new Field[fieldList.size()])); + } + } + + private static boolean extendFields(EnumSet fieldEnums) { + boolean modified = false; + if (fieldEnums != null && !fieldEnums.contains(Field.PRIMARY_FILTERS)) { + fieldEnums.add(Field.PRIMARY_FILTERS); + modified = true; + } + return modified; + } + private static Long parseLongStr(String str) { + 
return str == null ? null : Long.parseLong(str.trim()); + } + + private static String parseStr(String str) { + return str == null ? null : str.trim(); + } + + private static UserGroupInformation getUser(HttpServletRequest req) { + String remoteUser = req.getRemoteUser(); + UserGroupInformation callerUGI = null; + if (remoteUser != null) { + callerUGI = UserGroupInformation.createRemoteUser(remoteUser); + } + return callerUGI; + } + + private static void injectOwnerInfo(TimelineEntity timelineEntity, + String owner) throws YarnException { + if (timelineEntity.getPrimaryFilters() != null && + timelineEntity.getPrimaryFilters().containsKey( + TimelineStore.SystemFilter.ENTITY_OWNER)) { + throw new YarnException( + "User should not use the timeline system filter key: " + + TimelineStore.SystemFilter.ENTITY_OWNER); + } + timelineEntity.addPrimaryFilter( + TimelineStore.SystemFilter.ENTITY_OWNER + .toString(), owner); + } + + private static void cleanupOwnerInfo(TimelineEntity timelineEntity) { + if (timelineEntity.getPrimaryFilters() != null) { + timelineEntity.getPrimaryFilters().remove( + TimelineStore.SystemFilter.ENTITY_OWNER.toString()); + } + } + +} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TestGenericObjectMapper.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TestGenericObjectMapper.java deleted file mode 100644 index d684a27..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TestGenericObjectMapper.java +++ /dev/null @@ -1,102 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline; - -import org.apache.hadoop.classification.InterfaceAudience; -import org.apache.hadoop.classification.InterfaceStability; -import org.apache.hadoop.io.WritableComparator; -import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.GenericObjectMapper; -import org.junit.Test; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import static org.junit.Assert.assertEquals; - -@InterfaceAudience.Private -@InterfaceStability.Unstable -public class TestGenericObjectMapper { - - @Test - public void testEncoding() { - testEncoding(Long.MAX_VALUE); - testEncoding(Long.MIN_VALUE); - testEncoding(0l); - testEncoding(128l); - testEncoding(256l); - testEncoding(512l); - testEncoding(-256l); - } - - private static void testEncoding(long l) { - byte[] b = GenericObjectMapper.writeReverseOrderedLong(l); - assertEquals("error decoding", l, - GenericObjectMapper.readReverseOrderedLong(b, 0)); - byte[] buf = new byte[16]; - System.arraycopy(b, 0, buf, 5, 8); - assertEquals("error decoding at offset", l, - GenericObjectMapper.readReverseOrderedLong(buf, 5)); - if (l > Long.MIN_VALUE) { - byte[] a = GenericObjectMapper.writeReverseOrderedLong(l-1); - assertEquals("error preserving ordering", 1, - WritableComparator.compareBytes(a, 0, a.length, b, 0, b.length)); - } - if (l < Long.MAX_VALUE) { - byte[] c = GenericObjectMapper.writeReverseOrderedLong(l+1); - assertEquals("error preserving ordering", 1, - WritableComparator.compareBytes(b, 0, b.length, c, 0, c.length)); - } - } - - private static void verify(Object o) throws IOException { - assertEquals(o, GenericObjectMapper.read(GenericObjectMapper.write(o))); - } - - @Test - public void testValueTypes() throws IOException { - verify(Integer.MAX_VALUE); - verify(Integer.MIN_VALUE); - assertEquals(Integer.MAX_VALUE, GenericObjectMapper.read( - GenericObjectMapper.write((long) Integer.MAX_VALUE))); - assertEquals(Integer.MIN_VALUE, GenericObjectMapper.read( - GenericObjectMapper.write((long) Integer.MIN_VALUE))); - verify((long)Integer.MAX_VALUE + 1l); - verify((long)Integer.MIN_VALUE - 1l); - - verify(Long.MAX_VALUE); - verify(Long.MIN_VALUE); - - assertEquals(42, GenericObjectMapper.read(GenericObjectMapper.write(42l))); - verify(42); - verify(1.23); - verify("abc"); - verify(true); - List list = new ArrayList(); - list.add("123"); - list.add("abc"); - verify(list); - Map map = new HashMap(); - map.put("k1","v1"); - map.put("k2","v2"); - verify(map); - } - -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TestLeveldbTimelineStore.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TestLeveldbTimelineStore.java deleted file mode 100644 index 59449f8..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TestLeveldbTimelineStore.java +++ /dev/null @@ -1,265 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline; - -import static org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.GenericObjectMapper.writeReverseOrderedLong; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; - -import java.io.File; -import java.io.IOException; -import java.util.Collections; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import org.apache.hadoop.classification.InterfaceAudience; -import org.apache.hadoop.classification.InterfaceStability; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.FileContext; -import org.apache.hadoop.fs.FileStatus; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.io.IOUtils; -import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities; -import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity; -import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse; -import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.iq80.leveldb.DBIterator; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -@InterfaceAudience.Private -@InterfaceStability.Unstable -public class TestLeveldbTimelineStore extends TimelineStoreTestUtils { - private FileContext fsContext; - private File fsPath; - - @Before - public void setup() throws Exception { - fsContext = FileContext.getLocalFSFileContext(); - Configuration conf = new YarnConfiguration(); - fsPath = new File("target", this.getClass().getSimpleName() + - "-tmpDir").getAbsoluteFile(); - fsContext.delete(new Path(fsPath.getAbsolutePath()), true); - conf.set(YarnConfiguration.TIMELINE_SERVICE_LEVELDB_PATH, - fsPath.getAbsolutePath()); - conf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_TTL_ENABLE, false); - store = new LeveldbTimelineStore(); - store.init(conf); - store.start(); - loadTestData(); - loadVerificationData(); - } - - @After - public void tearDown() throws Exception { - store.stop(); - fsContext.delete(new Path(fsPath.getAbsolutePath()), true); - } - - @Test - public void testRootDirPermission() throws IOException { - FileSystem fs = FileSystem.getLocal(new YarnConfiguration()); - FileStatus file = fs.getFileStatus( - new Path(fsPath.getAbsolutePath(), LeveldbTimelineStore.FILENAME)); - assertNotNull(file); - assertEquals(LeveldbTimelineStore.LEVELDB_DIR_UMASK, file.getPermission()); - } - - @Test - public void testGetSingleEntity() throws IOException { - super.testGetSingleEntity(); - ((LeveldbTimelineStore)store).clearStartTimeCache(); - super.testGetSingleEntity(); - loadTestData(); - } - - @Test - public void testGetEntities() throws IOException { - super.testGetEntities(); - } - - @Test - public void testGetEntitiesWithFromId() throws IOException { - super.testGetEntitiesWithFromId(); - } - - @Test - public void testGetEntitiesWithFromTs() throws IOException { - 
super.testGetEntitiesWithFromTs(); - } - - @Test - public void testGetEntitiesWithPrimaryFilters() throws IOException { - super.testGetEntitiesWithPrimaryFilters(); - } - - @Test - public void testGetEntitiesWithSecondaryFilters() throws IOException { - super.testGetEntitiesWithSecondaryFilters(); - } - - @Test - public void testGetEvents() throws IOException { - super.testGetEvents(); - } - - @Test - public void testCacheSizes() { - Configuration conf = new Configuration(); - assertEquals(10000, LeveldbTimelineStore.getStartTimeReadCacheSize(conf)); - assertEquals(10000, LeveldbTimelineStore.getStartTimeWriteCacheSize(conf)); - conf.setInt( - YarnConfiguration.TIMELINE_SERVICE_LEVELDB_START_TIME_READ_CACHE_SIZE, - 10001); - assertEquals(10001, LeveldbTimelineStore.getStartTimeReadCacheSize(conf)); - conf = new Configuration(); - conf.setInt( - YarnConfiguration.TIMELINE_SERVICE_LEVELDB_START_TIME_WRITE_CACHE_SIZE, - 10002); - assertEquals(10002, LeveldbTimelineStore.getStartTimeWriteCacheSize(conf)); - } - - private boolean deleteNextEntity(String entityType, byte[] ts) - throws IOException, InterruptedException { - DBIterator iterator = null; - DBIterator pfIterator = null; - try { - iterator = ((LeveldbTimelineStore)store).getDbIterator(false); - pfIterator = ((LeveldbTimelineStore)store).getDbIterator(false); - return ((LeveldbTimelineStore)store).deleteNextEntity(entityType, ts, - iterator, pfIterator, false); - } finally { - IOUtils.cleanup(null, iterator, pfIterator); - } - } - - @Test - public void testGetEntityTypes() throws IOException { - List entityTypes = ((LeveldbTimelineStore)store).getEntityTypes(); - assertEquals(4, entityTypes.size()); - assertEquals(entityType1, entityTypes.get(0)); - assertEquals(entityType2, entityTypes.get(1)); - assertEquals(entityType4, entityTypes.get(2)); - assertEquals(entityType5, entityTypes.get(3)); - } - - @Test - public void testDeleteEntities() throws IOException, InterruptedException { - assertEquals(2, getEntities("type_1").size()); - assertEquals(1, getEntities("type_2").size()); - - assertEquals(false, deleteNextEntity(entityType1, - writeReverseOrderedLong(122l))); - assertEquals(2, getEntities("type_1").size()); - assertEquals(1, getEntities("type_2").size()); - - assertEquals(true, deleteNextEntity(entityType1, - writeReverseOrderedLong(123l))); - List entities = getEntities("type_2"); - assertEquals(1, entities.size()); - verifyEntityInfo(entityId2, entityType2, events2, Collections.singletonMap( - entityType1, Collections.singleton(entityId1b)), EMPTY_PRIMARY_FILTERS, - EMPTY_MAP, entities.get(0)); - entities = getEntitiesWithPrimaryFilter("type_1", userFilter); - assertEquals(1, entities.size()); - verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(0)); - - ((LeveldbTimelineStore)store).discardOldEntities(-123l); - assertEquals(1, getEntities("type_1").size()); - assertEquals(0, getEntities("type_2").size()); - assertEquals(3, ((LeveldbTimelineStore)store).getEntityTypes().size()); - - ((LeveldbTimelineStore)store).discardOldEntities(123l); - assertEquals(0, getEntities("type_1").size()); - assertEquals(0, getEntities("type_2").size()); - assertEquals(0, ((LeveldbTimelineStore)store).getEntityTypes().size()); - assertEquals(0, getEntitiesWithPrimaryFilter("type_1", userFilter).size()); - } - - @Test - public void testDeleteEntitiesPrimaryFilters() - throws IOException, InterruptedException { - Map> primaryFilter = - Collections.singletonMap("user", 
Collections.singleton( - (Object) "otheruser")); - TimelineEntities atsEntities = new TimelineEntities(); - atsEntities.setEntities(Collections.singletonList(createEntity(entityId1b, - entityType1, 789l, Collections.singletonList(ev2), null, primaryFilter, - null))); - TimelinePutResponse response = store.put(atsEntities); - assertEquals(0, response.getErrors().size()); - - NameValuePair pfPair = new NameValuePair("user", "otheruser"); - List entities = getEntitiesWithPrimaryFilter("type_1", - pfPair); - assertEquals(1, entities.size()); - verifyEntityInfo(entityId1b, entityType1, Collections.singletonList(ev2), - EMPTY_REL_ENTITIES, primaryFilter, EMPTY_MAP, entities.get(0)); - - entities = getEntitiesWithPrimaryFilter("type_1", userFilter); - assertEquals(2, entities.size()); - verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(0)); - verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(1)); - - ((LeveldbTimelineStore)store).discardOldEntities(-123l); - assertEquals(1, getEntitiesWithPrimaryFilter("type_1", pfPair).size()); - assertEquals(2, getEntitiesWithPrimaryFilter("type_1", userFilter).size()); - - ((LeveldbTimelineStore)store).discardOldEntities(123l); - assertEquals(0, getEntities("type_1").size()); - assertEquals(0, getEntities("type_2").size()); - assertEquals(0, ((LeveldbTimelineStore)store).getEntityTypes().size()); - - assertEquals(0, getEntitiesWithPrimaryFilter("type_1", pfPair).size()); - assertEquals(0, getEntitiesWithPrimaryFilter("type_1", userFilter).size()); - } - - @Test - public void testFromTsWithDeletion() - throws IOException, InterruptedException { - long l = System.currentTimeMillis(); - assertEquals(2, getEntitiesFromTs("type_1", l).size()); - assertEquals(1, getEntitiesFromTs("type_2", l).size()); - assertEquals(2, getEntitiesFromTsWithPrimaryFilter("type_1", userFilter, - l).size()); - ((LeveldbTimelineStore)store).discardOldEntities(123l); - assertEquals(0, getEntitiesFromTs("type_1", l).size()); - assertEquals(0, getEntitiesFromTs("type_2", l).size()); - assertEquals(0, getEntitiesFromTsWithPrimaryFilter("type_1", userFilter, - l).size()); - assertEquals(0, getEntities("type_1").size()); - assertEquals(0, getEntities("type_2").size()); - assertEquals(0, getEntitiesFromTsWithPrimaryFilter("type_1", userFilter, - l).size()); - loadTestData(); - assertEquals(0, getEntitiesFromTs("type_1", l).size()); - assertEquals(0, getEntitiesFromTs("type_2", l).size()); - assertEquals(0, getEntitiesFromTsWithPrimaryFilter("type_1", userFilter, - l).size()); - assertEquals(2, getEntities("type_1").size()); - assertEquals(1, getEntities("type_2").size()); - assertEquals(2, getEntitiesWithPrimaryFilter("type_1", userFilter).size()); - } - -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TestMemoryTimelineStore.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TestMemoryTimelineStore.java deleted file mode 100644 index 415de53..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TestMemoryTimelineStore.java +++ /dev/null @@ -1,83 +0,0 @@ -/** - * Licensed to 
the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline; - -import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import java.io.IOException; - -public class TestMemoryTimelineStore extends TimelineStoreTestUtils { - - @Before - public void setup() throws Exception { - store = new MemoryTimelineStore(); - store.init(new YarnConfiguration()); - store.start(); - loadTestData(); - loadVerificationData(); - } - - @After - public void tearDown() throws Exception { - store.stop(); - } - - public TimelineStore getTimelineStore() { - return store; - } - - @Test - public void testGetSingleEntity() throws IOException { - super.testGetSingleEntity(); - } - - @Test - public void testGetEntities() throws IOException { - super.testGetEntities(); - } - - @Test - public void testGetEntitiesWithFromId() throws IOException { - super.testGetEntitiesWithFromId(); - } - - @Test - public void testGetEntitiesWithFromTs() throws IOException { - super.testGetEntitiesWithFromTs(); - } - - @Test - public void testGetEntitiesWithPrimaryFilters() throws IOException { - super.testGetEntitiesWithPrimaryFilters(); - } - - @Test - public void testGetEntitiesWithSecondaryFilters() throws IOException { - super.testGetEntitiesWithSecondaryFilters(); - } - - @Test - public void testGetEvents() throws IOException { - super.testGetEvents(); - } - -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TimelineStoreTestUtils.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TimelineStoreTestUtils.java deleted file mode 100644 index d760536..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TimelineStoreTestUtils.java +++ /dev/null @@ -1,789 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.EnumSet; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Set; -import java.util.SortedSet; -import java.util.TreeSet; - -import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities; -import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity; -import org.apache.hadoop.yarn.api.records.timeline.TimelineEvent; -import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse; -import org.apache.hadoop.yarn.api.records.timeline.TimelineEvents.EventsOfOneEntity; -import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse.TimelinePutError; -import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TimelineReader.Field; - -public class TimelineStoreTestUtils { - - protected static final List EMPTY_EVENTS = - Collections.emptyList(); - protected static final Map EMPTY_MAP = - Collections.emptyMap(); - protected static final Map> EMPTY_PRIMARY_FILTERS = - Collections.emptyMap(); - protected static final Map> EMPTY_REL_ENTITIES = - Collections.emptyMap(); - - protected TimelineStore store; - protected String entityId1; - protected String entityType1; - protected String entityId1b; - protected String entityId2; - protected String entityType2; - protected String entityId4; - protected String entityType4; - protected String entityId5; - protected String entityType5; - protected Map> primaryFilters; - protected Map secondaryFilters; - protected Map allFilters; - protected Map otherInfo; - protected Map> relEntityMap; - protected Map> relEntityMap2; - protected NameValuePair userFilter; - protected NameValuePair numericFilter1; - protected NameValuePair numericFilter2; - protected NameValuePair numericFilter3; - protected Collection goodTestingFilters; - protected Collection badTestingFilters; - protected TimelineEvent ev1; - protected TimelineEvent ev2; - protected TimelineEvent ev3; - protected TimelineEvent ev4; - protected Map eventInfo; - protected List events1; - protected List events2; - protected long beforeTs; - - /** - * Load test data into the given store - */ - protected void loadTestData() throws IOException { - beforeTs = System.currentTimeMillis()-1; - TimelineEntities entities = new TimelineEntities(); - Map> primaryFilters = - new HashMap>(); - Set l1 = new HashSet(); - l1.add("username"); - Set l2 = new HashSet(); - l2.add((long)Integer.MAX_VALUE); - Set l3 = new HashSet(); - l3.add("123abc"); - Set l4 = new HashSet(); - l4.add((long)Integer.MAX_VALUE + 1l); - primaryFilters.put("user", l1); - primaryFilters.put("appname", l2); - primaryFilters.put("other", l3); - primaryFilters.put("long", l4); - Map secondaryFilters = new HashMap(); - secondaryFilters.put("startTime", 123456l); 
- secondaryFilters.put("status", "RUNNING"); - Map otherInfo1 = new HashMap(); - otherInfo1.put("info1", "val1"); - otherInfo1.putAll(secondaryFilters); - - String entityId1 = "id_1"; - String entityType1 = "type_1"; - String entityId1b = "id_2"; - String entityId2 = "id_2"; - String entityType2 = "type_2"; - String entityId4 = "id_4"; - String entityType4 = "type_4"; - String entityId5 = "id_5"; - String entityType5 = "type_5"; - - Map> relatedEntities = - new HashMap>(); - relatedEntities.put(entityType2, Collections.singleton(entityId2)); - - TimelineEvent ev3 = createEvent(789l, "launch_event", null); - TimelineEvent ev4 = createEvent(-123l, "init_event", null); - List events = new ArrayList(); - events.add(ev3); - events.add(ev4); - entities.setEntities(Collections.singletonList(createEntity(entityId2, - entityType2, null, events, null, null, null))); - TimelinePutResponse response = store.put(entities); - assertEquals(0, response.getErrors().size()); - - TimelineEvent ev1 = createEvent(123l, "start_event", null); - entities.setEntities(Collections.singletonList(createEntity(entityId1, - entityType1, 123l, Collections.singletonList(ev1), - relatedEntities, primaryFilters, otherInfo1))); - response = store.put(entities); - assertEquals(0, response.getErrors().size()); - entities.setEntities(Collections.singletonList(createEntity(entityId1b, - entityType1, null, Collections.singletonList(ev1), relatedEntities, - primaryFilters, otherInfo1))); - response = store.put(entities); - assertEquals(0, response.getErrors().size()); - - Map eventInfo = new HashMap(); - eventInfo.put("event info 1", "val1"); - TimelineEvent ev2 = createEvent(456l, "end_event", eventInfo); - Map otherInfo2 = new HashMap(); - otherInfo2.put("info2", "val2"); - entities.setEntities(Collections.singletonList(createEntity(entityId1, - entityType1, null, Collections.singletonList(ev2), null, - primaryFilters, otherInfo2))); - response = store.put(entities); - assertEquals(0, response.getErrors().size()); - entities.setEntities(Collections.singletonList(createEntity(entityId1b, - entityType1, 789l, Collections.singletonList(ev2), null, - primaryFilters, otherInfo2))); - response = store.put(entities); - assertEquals(0, response.getErrors().size()); - - entities.setEntities(Collections.singletonList(createEntity( - "badentityid", "badentity", null, null, null, null, otherInfo1))); - response = store.put(entities); - assertEquals(1, response.getErrors().size()); - TimelinePutError error = response.getErrors().get(0); - assertEquals("badentityid", error.getEntityId()); - assertEquals("badentity", error.getEntityType()); - assertEquals(TimelinePutError.NO_START_TIME, error.getErrorCode()); - - relatedEntities.clear(); - relatedEntities.put(entityType5, Collections.singleton(entityId5)); - entities.setEntities(Collections.singletonList(createEntity(entityId4, - entityType4, 42l, null, relatedEntities, null, null))); - response = store.put(entities); - assertEquals(0, response.getErrors().size()); - } - - /** - * Load verification data - */ - protected void loadVerificationData() throws Exception { - userFilter = new NameValuePair("user", "username"); - numericFilter1 = new NameValuePair("appname", Integer.MAX_VALUE); - numericFilter2 = new NameValuePair("long", (long)Integer.MAX_VALUE + 1l); - numericFilter3 = new NameValuePair("other", "123abc"); - goodTestingFilters = new ArrayList(); - goodTestingFilters.add(new NameValuePair("appname", Integer.MAX_VALUE)); - goodTestingFilters.add(new NameValuePair("status", "RUNNING")); 
- badTestingFilters = new ArrayList<NameValuePair>(); - badTestingFilters.add(new NameValuePair("appname", Integer.MAX_VALUE)); - badTestingFilters.add(new NameValuePair("status", "FINISHED")); - - primaryFilters = new HashMap<String, Set<Object>>(); - Set<Object> l1 = new HashSet<Object>(); - l1.add("username"); - Set<Object> l2 = new HashSet<Object>(); - l2.add(Integer.MAX_VALUE); - Set<Object> l3 = new HashSet<Object>(); - l3.add("123abc"); - Set<Object> l4 = new HashSet<Object>(); - l4.add((long)Integer.MAX_VALUE + 1l); - primaryFilters.put("user", l1); - primaryFilters.put("appname", l2); - primaryFilters.put("other", l3); - primaryFilters.put("long", l4); - secondaryFilters = new HashMap<String, Object>(); - secondaryFilters.put("startTime", 123456); - secondaryFilters.put("status", "RUNNING"); - allFilters = new HashMap<String, Object>(); - allFilters.putAll(secondaryFilters); - for (Entry<String, Set<Object>> pf : primaryFilters.entrySet()) { - for (Object o : pf.getValue()) { - allFilters.put(pf.getKey(), o); - } - } - otherInfo = new HashMap<String, Object>(); - otherInfo.put("info1", "val1"); - otherInfo.put("info2", "val2"); - otherInfo.putAll(secondaryFilters); - - entityId1 = "id_1"; - entityType1 = "type_1"; - entityId1b = "id_2"; - entityId2 = "id_2"; - entityType2 = "type_2"; - entityId4 = "id_4"; - entityType4 = "type_4"; - entityId5 = "id_5"; - entityType5 = "type_5"; - - ev1 = createEvent(123l, "start_event", null); - - eventInfo = new HashMap<String, Object>(); - eventInfo.put("event info 1", "val1"); - ev2 = createEvent(456l, "end_event", eventInfo); - events1 = new ArrayList<TimelineEvent>(); - events1.add(ev2); - events1.add(ev1); - - relEntityMap = - new HashMap<String, Set<String>>(); - Set<String> ids = new HashSet<String>(); - ids.add(entityId1); - ids.add(entityId1b); - relEntityMap.put(entityType1, ids); - - relEntityMap2 = - new HashMap<String, Set<String>>(); - relEntityMap2.put(entityType4, Collections.singleton(entityId4)); - - ev3 = createEvent(789l, "launch_event", null); - ev4 = createEvent(-123l, "init_event", null); - events2 = new ArrayList<TimelineEvent>(); - events2.add(ev3); - events2.add(ev4); - } - - public void testGetSingleEntity() throws IOException { - // test getting entity info - verifyEntityInfo(null, null, null, null, null, null, - store.getEntity("id_1", "type_2", EnumSet.allOf(Field.class))); - - verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, 123l, store.getEntity(entityId1, - entityType1, EnumSet.allOf(Field.class))); - - verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, 123l, store.getEntity(entityId1b, - entityType1, EnumSet.allOf(Field.class))); - - verifyEntityInfo(entityId2, entityType2, events2, relEntityMap, - EMPTY_PRIMARY_FILTERS, EMPTY_MAP, -123l, store.getEntity(entityId2, - entityType2, EnumSet.allOf(Field.class))); - - verifyEntityInfo(entityId4, entityType4, EMPTY_EVENTS, EMPTY_REL_ENTITIES, - EMPTY_PRIMARY_FILTERS, EMPTY_MAP, 42l, store.getEntity(entityId4, - entityType4, EnumSet.allOf(Field.class))); - - verifyEntityInfo(entityId5, entityType5, EMPTY_EVENTS, relEntityMap2, - EMPTY_PRIMARY_FILTERS, EMPTY_MAP, 42l, store.getEntity(entityId5, - entityType5, EnumSet.allOf(Field.class))); - - // test getting single fields - verifyEntityInfo(entityId1, entityType1, events1, null, null, null, - store.getEntity(entityId1, entityType1, EnumSet.of(Field.EVENTS))); - - verifyEntityInfo(entityId1, entityType1, Collections.singletonList(ev2), - null, null, null, store.getEntity(entityId1, entityType1, - EnumSet.of(Field.LAST_EVENT_ONLY))); - - verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, store.getEntity(entityId1b, entityType1, - null)); - -
verifyEntityInfo(entityId1, entityType1, null, null, primaryFilters, null, - store.getEntity(entityId1, entityType1, - EnumSet.of(Field.PRIMARY_FILTERS))); - - verifyEntityInfo(entityId1, entityType1, null, null, null, otherInfo, - store.getEntity(entityId1, entityType1, EnumSet.of(Field.OTHER_INFO))); - - verifyEntityInfo(entityId2, entityType2, null, relEntityMap, null, null, - store.getEntity(entityId2, entityType2, - EnumSet.of(Field.RELATED_ENTITIES))); - } - - protected List<TimelineEntity> getEntities(String entityType) - throws IOException { - return store.getEntities(entityType, null, null, null, null, null, - null, null, null).getEntities(); - } - - protected List<TimelineEntity> getEntitiesWithPrimaryFilter( - String entityType, NameValuePair primaryFilter) throws IOException { - return store.getEntities(entityType, null, null, null, null, null, - primaryFilter, null, null).getEntities(); - } - - protected List<TimelineEntity> getEntitiesFromId(String entityType, - String fromId) throws IOException { - return store.getEntities(entityType, null, null, null, fromId, null, - null, null, null).getEntities(); - } - - protected List<TimelineEntity> getEntitiesFromTs(String entityType, - long fromTs) throws IOException { - return store.getEntities(entityType, null, null, null, null, fromTs, - null, null, null).getEntities(); - } - - protected List<TimelineEntity> getEntitiesFromIdWithPrimaryFilter( - String entityType, NameValuePair primaryFilter, String fromId) - throws IOException { - return store.getEntities(entityType, null, null, null, fromId, null, - primaryFilter, null, null).getEntities(); - } - - protected List<TimelineEntity> getEntitiesFromTsWithPrimaryFilter( - String entityType, NameValuePair primaryFilter, long fromTs) - throws IOException { - return store.getEntities(entityType, null, null, null, null, fromTs, - primaryFilter, null, null).getEntities(); - } - - protected List<TimelineEntity> getEntitiesFromIdWithWindow(String entityType, - Long windowEnd, String fromId) throws IOException { - return store.getEntities(entityType, null, null, windowEnd, fromId, null, - null, null, null).getEntities(); - } - - protected List<TimelineEntity> getEntitiesFromIdWithPrimaryFilterAndWindow( - String entityType, Long windowEnd, String fromId, - NameValuePair primaryFilter) throws IOException { - return store.getEntities(entityType, null, null, windowEnd, fromId, null, - primaryFilter, null, null).getEntities(); - } - - protected List<TimelineEntity> getEntitiesWithFilters(String entityType, - NameValuePair primaryFilter, Collection<NameValuePair> secondaryFilters) - throws IOException { - return store.getEntities(entityType, null, null, null, null, null, - primaryFilter, secondaryFilters, null).getEntities(); - } - - protected List<TimelineEntity> getEntities(String entityType, Long limit, - Long windowStart, Long windowEnd, NameValuePair primaryFilter, - EnumSet<Field> fields) throws IOException { - return store.getEntities(entityType, limit, windowStart, windowEnd, null, - null, primaryFilter, null, fields).getEntities(); - } - - public void testGetEntities() throws IOException { - // test getting entities - assertEquals("nonzero entities size for nonexistent type", 0, - getEntities("type_0").size()); - assertEquals("nonzero entities size for nonexistent type", 0, - getEntities("type_3").size()); - assertEquals("nonzero entities size for nonexistent type", 0, - getEntities("type_6").size()); - assertEquals("nonzero entities size for nonexistent type", 0, - getEntitiesWithPrimaryFilter("type_0", userFilter).size()); - assertEquals("nonzero entities size for nonexistent type", 0, - getEntitiesWithPrimaryFilter("type_3", userFilter).size()); -
assertEquals("nonzero entities size for nonexistent type", 0, - getEntitiesWithPrimaryFilter("type_6", userFilter).size()); - - List entities = getEntities("type_1"); - assertEquals(2, entities.size()); - verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(0)); - verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(1)); - - entities = getEntities("type_2"); - assertEquals(1, entities.size()); - verifyEntityInfo(entityId2, entityType2, events2, relEntityMap, - EMPTY_PRIMARY_FILTERS, EMPTY_MAP, entities.get(0)); - - entities = getEntities("type_1", 1l, null, null, null, - EnumSet.allOf(Field.class)); - assertEquals(1, entities.size()); - verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(0)); - - entities = getEntities("type_1", 1l, 0l, null, null, - EnumSet.allOf(Field.class)); - assertEquals(1, entities.size()); - verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(0)); - - entities = getEntities("type_1", null, 234l, null, null, - EnumSet.allOf(Field.class)); - assertEquals(0, entities.size()); - - entities = getEntities("type_1", null, 123l, null, null, - EnumSet.allOf(Field.class)); - assertEquals(0, entities.size()); - - entities = getEntities("type_1", null, 234l, 345l, null, - EnumSet.allOf(Field.class)); - assertEquals(0, entities.size()); - - entities = getEntities("type_1", null, null, 345l, null, - EnumSet.allOf(Field.class)); - assertEquals(2, entities.size()); - verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(0)); - verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(1)); - - entities = getEntities("type_1", null, null, 123l, null, - EnumSet.allOf(Field.class)); - assertEquals(2, entities.size()); - verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(0)); - verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(1)); - } - - public void testGetEntitiesWithFromId() throws IOException { - List entities = getEntitiesFromId("type_1", entityId1); - assertEquals(2, entities.size()); - verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(0)); - verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(1)); - - entities = getEntitiesFromId("type_1", entityId1b); - assertEquals(1, entities.size()); - verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(0)); - - entities = getEntitiesFromIdWithWindow("type_1", 0l, entityId1); - assertEquals(0, entities.size()); - - entities = getEntitiesFromId("type_2", "a"); - assertEquals(0, entities.size()); - - entities = getEntitiesFromId("type_2", entityId2); - assertEquals(1, entities.size()); - verifyEntityInfo(entityId2, entityType2, events2, relEntityMap, - EMPTY_PRIMARY_FILTERS, EMPTY_MAP, entities.get(0)); - - entities = getEntitiesFromIdWithWindow("type_2", -456l, null); - assertEquals(0, entities.size()); - - entities = getEntitiesFromIdWithWindow("type_2", -456l, "a"); - assertEquals(0, entities.size()); - - entities = getEntitiesFromIdWithWindow("type_2", 0l, null); - assertEquals(1, 
entities.size()); - - entities = getEntitiesFromIdWithWindow("type_2", 0l, entityId2); - assertEquals(1, entities.size()); - - // same tests with primary filters - entities = getEntitiesFromIdWithPrimaryFilter("type_1", userFilter, - entityId1); - assertEquals(2, entities.size()); - verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(0)); - verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(1)); - - entities = getEntitiesFromIdWithPrimaryFilter("type_1", userFilter, - entityId1b); - assertEquals(1, entities.size()); - verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(0)); - - entities = getEntitiesFromIdWithPrimaryFilterAndWindow("type_1", 0l, - entityId1, userFilter); - assertEquals(0, entities.size()); - - entities = getEntitiesFromIdWithPrimaryFilter("type_2", userFilter, "a"); - assertEquals(0, entities.size()); - } - - public void testGetEntitiesWithFromTs() throws IOException { - assertEquals(0, getEntitiesFromTs("type_1", beforeTs).size()); - assertEquals(0, getEntitiesFromTs("type_2", beforeTs).size()); - assertEquals(0, getEntitiesFromTsWithPrimaryFilter("type_1", userFilter, - beforeTs).size()); - long afterTs = System.currentTimeMillis(); - assertEquals(2, getEntitiesFromTs("type_1", afterTs).size()); - assertEquals(1, getEntitiesFromTs("type_2", afterTs).size()); - assertEquals(2, getEntitiesFromTsWithPrimaryFilter("type_1", userFilter, - afterTs).size()); - assertEquals(2, getEntities("type_1").size()); - assertEquals(1, getEntities("type_2").size()); - assertEquals(2, getEntitiesWithPrimaryFilter("type_1", userFilter).size()); - // check insert time is not overwritten - long beforeTs = this.beforeTs; - loadTestData(); - assertEquals(0, getEntitiesFromTs("type_1", beforeTs).size()); - assertEquals(0, getEntitiesFromTs("type_2", beforeTs).size()); - assertEquals(0, getEntitiesFromTsWithPrimaryFilter("type_1", userFilter, - beforeTs).size()); - assertEquals(2, getEntitiesFromTs("type_1", afterTs).size()); - assertEquals(1, getEntitiesFromTs("type_2", afterTs).size()); - assertEquals(2, getEntitiesFromTsWithPrimaryFilter("type_1", userFilter, - afterTs).size()); - } - - public void testGetEntitiesWithPrimaryFilters() throws IOException { - // test using primary filter - assertEquals("nonzero entities size for primary filter", 0, - getEntitiesWithPrimaryFilter("type_1", - new NameValuePair("none", "none")).size()); - assertEquals("nonzero entities size for primary filter", 0, - getEntitiesWithPrimaryFilter("type_2", - new NameValuePair("none", "none")).size()); - assertEquals("nonzero entities size for primary filter", 0, - getEntitiesWithPrimaryFilter("type_3", - new NameValuePair("none", "none")).size()); - - List entities = getEntitiesWithPrimaryFilter("type_1", - userFilter); - assertEquals(2, entities.size()); - verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(0)); - verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(1)); - - entities = getEntitiesWithPrimaryFilter("type_1", numericFilter1); - assertEquals(2, entities.size()); - verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(0)); - verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(1)); - - 
entities = getEntitiesWithPrimaryFilter("type_1", numericFilter2); - assertEquals(2, entities.size()); - verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(0)); - verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(1)); - - entities = getEntitiesWithPrimaryFilter("type_1", numericFilter3); - assertEquals(2, entities.size()); - verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(0)); - verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(1)); - - entities = getEntitiesWithPrimaryFilter("type_2", userFilter); - assertEquals(0, entities.size()); - - entities = getEntities("type_1", 1l, null, null, userFilter, null); - assertEquals(1, entities.size()); - verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(0)); - - entities = getEntities("type_1", 1l, 0l, null, userFilter, null); - assertEquals(1, entities.size()); - verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(0)); - - entities = getEntities("type_1", null, 234l, null, userFilter, null); - assertEquals(0, entities.size()); - - entities = getEntities("type_1", null, 234l, 345l, userFilter, null); - assertEquals(0, entities.size()); - - entities = getEntities("type_1", null, null, 345l, userFilter, null); - assertEquals(2, entities.size()); - verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(0)); - verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(1)); - } - - public void testGetEntitiesWithSecondaryFilters() throws IOException { - // test using secondary filter - List entities = getEntitiesWithFilters("type_1", null, - goodTestingFilters); - assertEquals(2, entities.size()); - verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(0)); - verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(1)); - - entities = getEntitiesWithFilters("type_1", userFilter, goodTestingFilters); - assertEquals(2, entities.size()); - verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(0)); - verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(1)); - - entities = getEntitiesWithFilters("type_1", null, - Collections.singleton(new NameValuePair("user", "none"))); - assertEquals(0, entities.size()); - - entities = getEntitiesWithFilters("type_1", null, badTestingFilters); - assertEquals(0, entities.size()); - - entities = getEntitiesWithFilters("type_1", userFilter, badTestingFilters); - assertEquals(0, entities.size()); - } - - public void testGetEvents() throws IOException { - // test getting entity timelines - SortedSet sortedSet = new TreeSet(); - sortedSet.add(entityId1); - List timelines = - store.getEntityTimelines(entityType1, sortedSet, null, null, - null, null).getAllEvents(); - assertEquals(1, timelines.size()); - verifyEntityTimeline(timelines.get(0), entityId1, entityType1, ev2, ev1); - - sortedSet.add(entityId1b); - timelines = store.getEntityTimelines(entityType1, sortedSet, null, - null, null, null).getAllEvents(); - assertEquals(2, 
timelines.size()); - verifyEntityTimeline(timelines.get(0), entityId1, entityType1, ev2, ev1); - verifyEntityTimeline(timelines.get(1), entityId1b, entityType1, ev2, ev1); - - timelines = store.getEntityTimelines(entityType1, sortedSet, 1l, - null, null, null).getAllEvents(); - assertEquals(2, timelines.size()); - verifyEntityTimeline(timelines.get(0), entityId1, entityType1, ev2); - verifyEntityTimeline(timelines.get(1), entityId1b, entityType1, ev2); - - timelines = store.getEntityTimelines(entityType1, sortedSet, null, - 345l, null, null).getAllEvents(); - assertEquals(2, timelines.size()); - verifyEntityTimeline(timelines.get(0), entityId1, entityType1, ev2); - verifyEntityTimeline(timelines.get(1), entityId1b, entityType1, ev2); - - timelines = store.getEntityTimelines(entityType1, sortedSet, null, - 123l, null, null).getAllEvents(); - assertEquals(2, timelines.size()); - verifyEntityTimeline(timelines.get(0), entityId1, entityType1, ev2); - verifyEntityTimeline(timelines.get(1), entityId1b, entityType1, ev2); - - timelines = store.getEntityTimelines(entityType1, sortedSet, null, - null, 345l, null).getAllEvents(); - assertEquals(2, timelines.size()); - verifyEntityTimeline(timelines.get(0), entityId1, entityType1, ev1); - verifyEntityTimeline(timelines.get(1), entityId1b, entityType1, ev1); - - timelines = store.getEntityTimelines(entityType1, sortedSet, null, - null, 123l, null).getAllEvents(); - assertEquals(2, timelines.size()); - verifyEntityTimeline(timelines.get(0), entityId1, entityType1, ev1); - verifyEntityTimeline(timelines.get(1), entityId1b, entityType1, ev1); - - timelines = store.getEntityTimelines(entityType1, sortedSet, null, - null, null, Collections.singleton("end_event")).getAllEvents(); - assertEquals(2, timelines.size()); - verifyEntityTimeline(timelines.get(0), entityId1, entityType1, ev2); - verifyEntityTimeline(timelines.get(1), entityId1b, entityType1, ev2); - - sortedSet.add(entityId2); - timelines = store.getEntityTimelines(entityType2, sortedSet, null, - null, null, null).getAllEvents(); - assertEquals(1, timelines.size()); - verifyEntityTimeline(timelines.get(0), entityId2, entityType2, ev3, ev4); - } - - /** - * Verify a single entity and its start time - */ - protected static void verifyEntityInfo(String entityId, String entityType, - List<TimelineEvent> events, Map<String, Set<String>> relatedEntities, - Map<String, Set<Object>> primaryFilters, Map<String, Object> otherInfo, - Long startTime, TimelineEntity retrievedEntityInfo) { - - verifyEntityInfo(entityId, entityType, events, relatedEntities, - primaryFilters, otherInfo, retrievedEntityInfo); - assertEquals(startTime, retrievedEntityInfo.getStartTime()); - } - - /** - * Verify a single entity - */ - protected static void verifyEntityInfo(String entityId, String entityType, - List<TimelineEvent> events, Map<String, Set<String>> relatedEntities, - Map<String, Set<Object>> primaryFilters, Map<String, Object> otherInfo, - TimelineEntity retrievedEntityInfo) { - if (entityId == null) { - assertNull(retrievedEntityInfo); - return; - } - assertEquals(entityId, retrievedEntityInfo.getEntityId()); - assertEquals(entityType, retrievedEntityInfo.getEntityType()); - if (events == null) { - assertNull(retrievedEntityInfo.getEvents()); - } else { - assertEquals(events, retrievedEntityInfo.getEvents()); - } - if (relatedEntities == null) { - assertNull(retrievedEntityInfo.getRelatedEntities()); - } else { - assertEquals(relatedEntities, retrievedEntityInfo.getRelatedEntities()); - } - if (primaryFilters == null) { - assertNull(retrievedEntityInfo.getPrimaryFilters()); - } else { - assertTrue(primaryFilters.equals( -
retrievedEntityInfo.getPrimaryFilters())); - } - if (otherInfo == null) { - assertNull(retrievedEntityInfo.getOtherInfo()); - } else { - assertTrue(otherInfo.equals(retrievedEntityInfo.getOtherInfo())); - } - } - - /** - * Verify timeline events - */ - private static void verifyEntityTimeline( - EventsOfOneEntity retrievedEvents, String entityId, String entityType, - TimelineEvent... actualEvents) { - assertEquals(entityId, retrievedEvents.getEntityId()); - assertEquals(entityType, retrievedEvents.getEntityType()); - assertEquals(actualEvents.length, retrievedEvents.getEvents().size()); - for (int i = 0; i < actualEvents.length; i++) { - assertEquals(actualEvents[i], retrievedEvents.getEvents().get(i)); - } - } - - /** - * Create a test entity - */ - protected static TimelineEntity createEntity(String entityId, String entityType, - Long startTime, List<TimelineEvent> events, - Map<String, Set<String>> relatedEntities, - Map<String, Set<Object>> primaryFilters, - Map<String, Object> otherInfo) { - TimelineEntity entity = new TimelineEntity(); - entity.setEntityId(entityId); - entity.setEntityType(entityType); - entity.setStartTime(startTime); - entity.setEvents(events); - if (relatedEntities != null) { - for (Entry<String, Set<String>> e : relatedEntities.entrySet()) { - for (String v : e.getValue()) { - entity.addRelatedEntity(e.getKey(), v); - } - } - } else { - entity.setRelatedEntities(null); - } - entity.setPrimaryFilters(primaryFilters); - entity.setOtherInfo(otherInfo); - return entity; - } - - /** - * Create a test event - */ - private static TimelineEvent createEvent(long timestamp, String type, Map<String, Object> info) { - TimelineEvent event = new TimelineEvent(); - event.setTimestamp(timestamp); - event.setEventType(type); - event.setEventInfo(info); - return event; - } - -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/security/TestTimelineACLsManager.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/security/TestTimelineACLsManager.java deleted file mode 100644 index 39102b4..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/security/TestTimelineACLsManager.java +++ /dev/null @@ -1,91 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License.
- */ - -package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.security; - -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.security.UserGroupInformation; -import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity; -import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.exceptions.YarnException; -import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TimelineStore; -import org.junit.Assert; -import org.junit.Test; - -public class TestTimelineACLsManager { - - @Test - public void testYarnACLsNotEnabled() throws Exception { - Configuration conf = new YarnConfiguration(); - conf.setBoolean(YarnConfiguration.YARN_ACL_ENABLE, false); - TimelineACLsManager timelineACLsManager = - new TimelineACLsManager(conf); - TimelineEntity entity = new TimelineEntity(); - entity.addPrimaryFilter( - TimelineStore.SystemFilter.ENTITY_OWNER - .toString(), "owner"); - Assert.assertTrue( - "Always true when ACLs are not enabled", - timelineACLsManager.checkAccess( - UserGroupInformation.createRemoteUser("user"), entity)); - } - - @Test - public void testYarnACLsEnabled() throws Exception { - Configuration conf = new YarnConfiguration(); - conf.setBoolean(YarnConfiguration.YARN_ACL_ENABLE, true); - conf.set(YarnConfiguration.YARN_ADMIN_ACL, "admin"); - TimelineACLsManager timelineACLsManager = - new TimelineACLsManager(conf); - TimelineEntity entity = new TimelineEntity(); - entity.addPrimaryFilter( - TimelineStore.SystemFilter.ENTITY_OWNER - .toString(), "owner"); - Assert.assertTrue( - "Owner should be allowed to access", - timelineACLsManager.checkAccess( - UserGroupInformation.createRemoteUser("owner"), entity)); - Assert.assertFalse( - "Other shouldn't be allowed to access", - timelineACLsManager.checkAccess( - UserGroupInformation.createRemoteUser("other"), entity)); - Assert.assertTrue( - "Admin should be allowed to access", - timelineACLsManager.checkAccess( - UserGroupInformation.createRemoteUser("admin"), entity)); - } - - @Test - public void testCorruptedOwnerInfo() throws Exception { - Configuration conf = new YarnConfiguration(); - conf.setBoolean(YarnConfiguration.YARN_ACL_ENABLE, true); - conf.set(YarnConfiguration.YARN_ADMIN_ACL, "owner"); - TimelineACLsManager timelineACLsManager = - new TimelineACLsManager(conf); - TimelineEntity entity = new TimelineEntity(); - try { - timelineACLsManager.checkAccess( - UserGroupInformation.createRemoteUser("owner"), entity); - Assert.fail("Exception is expected"); - } catch (YarnException e) { - Assert.assertTrue("It's not the exact expected exception", e.getMessage() - .contains("is corrupted.")); - } - } - -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestTimelineWebServices.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestTimelineWebServices.java deleted file mode 100644 index 7e3e409..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestTimelineWebServices.java +++ /dev/null @@ -1,631 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.yarn.server.applicationhistoryservice.webapp; - -import static org.junit.Assert.assertEquals; - -import java.io.IOException; - -import javax.inject.Singleton; -import javax.servlet.Filter; -import javax.servlet.FilterChain; -import javax.servlet.FilterConfig; -import javax.servlet.ServletException; -import javax.servlet.ServletRequest; -import javax.servlet.ServletResponse; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletRequestWrapper; -import javax.ws.rs.core.MediaType; - -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities; -import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity; -import org.apache.hadoop.yarn.api.records.timeline.TimelineEvent; -import org.apache.hadoop.yarn.api.records.timeline.TimelineEvents; -import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse; -import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.security.AdminACLsManager; -import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TestMemoryTimelineStore; -import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TimelineStore; -import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.security.TimelineACLsManager; -import org.apache.hadoop.yarn.webapp.GenericExceptionHandler; -import org.apache.hadoop.yarn.webapp.YarnJacksonJaxbJsonProvider; -import org.junit.Assert; -import org.junit.Test; - -import com.google.inject.Guice; -import com.google.inject.Injector; -import com.google.inject.servlet.GuiceServletContextListener; -import com.google.inject.servlet.ServletModule; -import com.sun.jersey.api.client.ClientResponse; -import com.sun.jersey.api.client.WebResource; -import com.sun.jersey.api.client.config.DefaultClientConfig; -import com.sun.jersey.guice.spi.container.servlet.GuiceContainer; -import com.sun.jersey.test.framework.JerseyTest; -import com.sun.jersey.test.framework.WebAppDescriptor; - - -public class TestTimelineWebServices extends JerseyTest { - - private static TimelineStore store; - private static TimelineACLsManager timelineACLsManager; - private static AdminACLsManager adminACLsManager; - private static String remoteUser; - private long beforeTime; - - private Injector injector = Guice.createInjector(new ServletModule() { - - @Override - protected void configureServlets() { - bind(YarnJacksonJaxbJsonProvider.class); - bind(TimelineWebServices.class); - bind(GenericExceptionHandler.class); - try{ - store = mockTimelineStore(); - } catch (Exception e) { - Assert.fail(); - } - bind(TimelineStore.class).toInstance(store); - Configuration conf = new YarnConfiguration(); - conf.setBoolean(YarnConfiguration.YARN_ACL_ENABLE, false); - timelineACLsManager = new 
TimelineACLsManager(conf); - conf.setBoolean(YarnConfiguration.YARN_ACL_ENABLE, true); - conf.set(YarnConfiguration.YARN_ADMIN_ACL, "admin"); - adminACLsManager = new AdminACLsManager(conf); - bind(TimelineACLsManager.class).toInstance(timelineACLsManager); - serve("/*").with(GuiceContainer.class); - filter("/*").through(TestFilter.class); - } - - }); - - public class GuiceServletConfig extends GuiceServletContextListener { - - @Override - protected Injector getInjector() { - return injector; - } - } - - private TimelineStore mockTimelineStore() - throws Exception { - beforeTime = System.currentTimeMillis() - 1; - TestMemoryTimelineStore store = - new TestMemoryTimelineStore(); - store.setup(); - return store.getTimelineStore(); - } - - public TestTimelineWebServices() { - super(new WebAppDescriptor.Builder( - "org.apache.hadoop.yarn.server.applicationhistoryservice.webapp") - .contextListenerClass(GuiceServletConfig.class) - .filterClass(com.google.inject.servlet.GuiceFilter.class) - .contextPath("jersey-guice-filter") - .servletPath("/") - .clientConfig(new DefaultClientConfig(YarnJacksonJaxbJsonProvider.class)) - .build()); - } - - @Test - public void testAbout() throws Exception { - WebResource r = resource(); - ClientResponse response = r.path("ws").path("v1").path("timeline") - .accept(MediaType.APPLICATION_JSON) - .get(ClientResponse.class); - assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); - TimelineWebServices.AboutInfo about = - response.getEntity(TimelineWebServices.AboutInfo.class); - Assert.assertNotNull(about); - Assert.assertEquals("Timeline API", about.getAbout()); - } - - private static void verifyEntities(TimelineEntities entities) { - Assert.assertNotNull(entities); - Assert.assertEquals(2, entities.getEntities().size()); - TimelineEntity entity1 = entities.getEntities().get(0); - Assert.assertNotNull(entity1); - Assert.assertEquals("id_1", entity1.getEntityId()); - Assert.assertEquals("type_1", entity1.getEntityType()); - Assert.assertEquals(123l, entity1.getStartTime().longValue()); - Assert.assertEquals(2, entity1.getEvents().size()); - Assert.assertEquals(4, entity1.getPrimaryFilters().size()); - Assert.assertEquals(4, entity1.getOtherInfo().size()); - TimelineEntity entity2 = entities.getEntities().get(1); - Assert.assertNotNull(entity2); - Assert.assertEquals("id_2", entity2.getEntityId()); - Assert.assertEquals("type_1", entity2.getEntityType()); - Assert.assertEquals(123l, entity2.getStartTime().longValue()); - Assert.assertEquals(2, entity2.getEvents().size()); - Assert.assertEquals(4, entity2.getPrimaryFilters().size()); - Assert.assertEquals(4, entity2.getOtherInfo().size()); - } - - @Test - public void testGetEntities() throws Exception { - WebResource r = resource(); - ClientResponse response = r.path("ws").path("v1").path("timeline") - .path("type_1") - .accept(MediaType.APPLICATION_JSON) - .get(ClientResponse.class); - assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); - verifyEntities(response.getEntity(TimelineEntities.class)); - } - - @Test - public void testFromId() throws Exception { - WebResource r = resource(); - ClientResponse response = r.path("ws").path("v1").path("timeline") - .path("type_1").queryParam("fromId", "id_2") - .accept(MediaType.APPLICATION_JSON) - .get(ClientResponse.class); - assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); - assertEquals(1, response.getEntity(TimelineEntities.class).getEntities() - .size()); - - response = r.path("ws").path("v1").path("timeline") - 
.path("type_1").queryParam("fromId", "id_1") - .accept(MediaType.APPLICATION_JSON) - .get(ClientResponse.class); - assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); - assertEquals(2, response.getEntity(TimelineEntities.class).getEntities() - .size()); - } - - @Test - public void testFromTs() throws Exception { - WebResource r = resource(); - ClientResponse response = r.path("ws").path("v1").path("timeline") - .path("type_1").queryParam("fromTs", Long.toString(beforeTime)) - .accept(MediaType.APPLICATION_JSON) - .get(ClientResponse.class); - assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); - assertEquals(0, response.getEntity(TimelineEntities.class).getEntities() - .size()); - - response = r.path("ws").path("v1").path("timeline") - .path("type_1").queryParam("fromTs", Long.toString( - System.currentTimeMillis())) - .accept(MediaType.APPLICATION_JSON) - .get(ClientResponse.class); - assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); - assertEquals(2, response.getEntity(TimelineEntities.class).getEntities() - .size()); - } - - @Test - public void testPrimaryFilterString() { - WebResource r = resource(); - ClientResponse response = r.path("ws").path("v1").path("timeline") - .path("type_1").queryParam("primaryFilter", "user:username") - .accept(MediaType.APPLICATION_JSON) - .get(ClientResponse.class); - assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); - verifyEntities(response.getEntity(TimelineEntities.class)); - } - - @Test - public void testPrimaryFilterInteger() { - WebResource r = resource(); - ClientResponse response = r.path("ws").path("v1").path("timeline") - .path("type_1").queryParam("primaryFilter", - "appname:" + Integer.toString(Integer.MAX_VALUE)) - .accept(MediaType.APPLICATION_JSON) - .get(ClientResponse.class); - assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); - verifyEntities(response.getEntity(TimelineEntities.class)); - } - - @Test - public void testPrimaryFilterLong() { - WebResource r = resource(); - ClientResponse response = r.path("ws").path("v1").path("timeline") - .path("type_1").queryParam("primaryFilter", - "long:" + Long.toString((long)Integer.MAX_VALUE + 1l)) - .accept(MediaType.APPLICATION_JSON) - .get(ClientResponse.class); - assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); - verifyEntities(response.getEntity(TimelineEntities.class)); - } - - @Test - public void testPrimaryFilterNumericString() { - // without quotes, 123abc is interpreted as the number 123, - // which finds no entities - WebResource r = resource(); - ClientResponse response = r.path("ws").path("v1").path("timeline") - .path("type_1").queryParam("primaryFilter", "other:123abc") - .accept(MediaType.APPLICATION_JSON) - .get(ClientResponse.class); - assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); - assertEquals(0, response.getEntity(TimelineEntities.class).getEntities() - .size()); - } - - @Test - public void testPrimaryFilterNumericStringWithQuotes() { - WebResource r = resource(); - ClientResponse response = r.path("ws").path("v1").path("timeline") - .path("type_1").queryParam("primaryFilter", "other:\"123abc\"") - .accept(MediaType.APPLICATION_JSON) - .get(ClientResponse.class); - assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); - verifyEntities(response.getEntity(TimelineEntities.class)); - } - - @Test - public void testSecondaryFilters() { - WebResource r = resource(); - ClientResponse response = r.path("ws").path("v1").path("timeline") - .path("type_1") 
- .queryParam("secondaryFilter", - "user:username,appname:" + Integer.toString(Integer.MAX_VALUE)) - .accept(MediaType.APPLICATION_JSON) - .get(ClientResponse.class); - assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); - verifyEntities(response.getEntity(TimelineEntities.class)); - } - - @Test - public void testGetEntity() throws Exception { - WebResource r = resource(); - ClientResponse response = r.path("ws").path("v1").path("timeline") - .path("type_1").path("id_1") - .accept(MediaType.APPLICATION_JSON) - .get(ClientResponse.class); - assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); - TimelineEntity entity = response.getEntity(TimelineEntity.class); - Assert.assertNotNull(entity); - Assert.assertEquals("id_1", entity.getEntityId()); - Assert.assertEquals("type_1", entity.getEntityType()); - Assert.assertEquals(123l, entity.getStartTime().longValue()); - Assert.assertEquals(2, entity.getEvents().size()); - Assert.assertEquals(4, entity.getPrimaryFilters().size()); - Assert.assertEquals(4, entity.getOtherInfo().size()); - } - - @Test - public void testGetEntityFields1() throws Exception { - WebResource r = resource(); - ClientResponse response = r.path("ws").path("v1").path("timeline") - .path("type_1").path("id_1").queryParam("fields", "events,otherinfo") - .accept(MediaType.APPLICATION_JSON) - .get(ClientResponse.class); - assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); - TimelineEntity entity = response.getEntity(TimelineEntity.class); - Assert.assertNotNull(entity); - Assert.assertEquals("id_1", entity.getEntityId()); - Assert.assertEquals("type_1", entity.getEntityType()); - Assert.assertEquals(123l, entity.getStartTime().longValue()); - Assert.assertEquals(2, entity.getEvents().size()); - Assert.assertEquals(0, entity.getPrimaryFilters().size()); - Assert.assertEquals(4, entity.getOtherInfo().size()); - } - - @Test - public void testGetEntityFields2() throws Exception { - WebResource r = resource(); - ClientResponse response = r.path("ws").path("v1").path("timeline") - .path("type_1").path("id_1").queryParam("fields", "lasteventonly," + - "primaryfilters,relatedentities") - .accept(MediaType.APPLICATION_JSON) - .get(ClientResponse.class); - assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); - TimelineEntity entity = response.getEntity(TimelineEntity.class); - Assert.assertNotNull(entity); - Assert.assertEquals("id_1", entity.getEntityId()); - Assert.assertEquals("type_1", entity.getEntityType()); - Assert.assertEquals(123l, entity.getStartTime().longValue()); - Assert.assertEquals(1, entity.getEvents().size()); - Assert.assertEquals(4, entity.getPrimaryFilters().size()); - Assert.assertEquals(0, entity.getOtherInfo().size()); - } - - @Test - public void testGetEvents() throws Exception { - WebResource r = resource(); - ClientResponse response = r.path("ws").path("v1").path("timeline") - .path("type_1").path("events") - .queryParam("entityId", "id_1") - .accept(MediaType.APPLICATION_JSON) - .get(ClientResponse.class); - assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); - TimelineEvents events = response.getEntity(TimelineEvents.class); - Assert.assertNotNull(events); - Assert.assertEquals(1, events.getAllEvents().size()); - TimelineEvents.EventsOfOneEntity partEvents = events.getAllEvents().get(0); - Assert.assertEquals(2, partEvents.getEvents().size()); - TimelineEvent event1 = partEvents.getEvents().get(0); - Assert.assertEquals(456l, event1.getTimestamp()); - Assert.assertEquals("end_event", 
event1.getEventType()); - Assert.assertEquals(1, event1.getEventInfo().size()); - TimelineEvent event2 = partEvents.getEvents().get(1); - Assert.assertEquals(123l, event2.getTimestamp()); - Assert.assertEquals("start_event", event2.getEventType()); - Assert.assertEquals(0, event2.getEventInfo().size()); - } - - @Test - public void testPostEntities() throws Exception { - TimelineEntities entities = new TimelineEntities(); - TimelineEntity entity = new TimelineEntity(); - entity.setEntityId("test id 1"); - entity.setEntityType("test type 1"); - entity.setStartTime(System.currentTimeMillis()); - entities.addEntity(entity); - WebResource r = resource(); - ClientResponse response = r.path("ws").path("v1").path("timeline") - .accept(MediaType.APPLICATION_JSON) - .type(MediaType.APPLICATION_JSON) - .post(ClientResponse.class, entities); - assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); - TimelinePutResponse putResposne = response.getEntity(TimelinePutResponse.class); - Assert.assertNotNull(putResposne); - Assert.assertEquals(0, putResposne.getErrors().size()); - // verify the entity exists in the store - response = r.path("ws").path("v1").path("timeline") - .path("test type 1").path("test id 1") - .accept(MediaType.APPLICATION_JSON) - .get(ClientResponse.class); - assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); - entity = response.getEntity(TimelineEntity.class); - Assert.assertNotNull(entity); - Assert.assertEquals("test id 1", entity.getEntityId()); - Assert.assertEquals("test type 1", entity.getEntityType()); - } - - @Test - public void testPostEntitiesWithYarnACLsEnabled() throws Exception { - AdminACLsManager oldAdminACLsManager = - timelineACLsManager.setAdminACLsManager(adminACLsManager); - remoteUser = "tester"; - try { - TimelineEntities entities = new TimelineEntities(); - TimelineEntity entity = new TimelineEntity(); - entity.setEntityId("test id 2"); - entity.setEntityType("test type 2"); - entity.setStartTime(System.currentTimeMillis()); - entities.addEntity(entity); - WebResource r = resource(); - ClientResponse response = r.path("ws").path("v1").path("timeline") - .accept(MediaType.APPLICATION_JSON) - .type(MediaType.APPLICATION_JSON) - .post(ClientResponse.class, entities); - assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); - TimelinePutResponse putResponse = response.getEntity(TimelinePutResponse.class); - Assert.assertNotNull(putResponse); - Assert.assertEquals(0, putResponse.getErrors().size()); - - // override/append timeline data in the same entity with different user - remoteUser = "other"; - response = r.path("ws").path("v1").path("timeline") - .accept(MediaType.APPLICATION_JSON) - .type(MediaType.APPLICATION_JSON) - .post(ClientResponse.class, entities); - assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); - putResponse = response.getEntity(TimelinePutResponse.class); - Assert.assertNotNull(putResponse); - Assert.assertEquals(1, putResponse.getErrors().size()); - Assert.assertEquals(TimelinePutResponse.TimelinePutError.ACCESS_DENIED, - putResponse.getErrors().get(0).getErrorCode()); - } finally { - timelineACLsManager.setAdminACLsManager(oldAdminACLsManager); - remoteUser = null; - } - } - - @Test - public void testGetEntityWithYarnACLsEnabled() throws Exception { - AdminACLsManager oldAdminACLsManager = - timelineACLsManager.setAdminACLsManager(adminACLsManager); - remoteUser = "tester"; - try { - TimelineEntities entities = new TimelineEntities(); - TimelineEntity entity = new TimelineEntity(); - 
entity.setEntityId("test id 3"); - entity.setEntityType("test type 3"); - entity.setStartTime(System.currentTimeMillis()); - entities.addEntity(entity); - WebResource r = resource(); - ClientResponse response = r.path("ws").path("v1").path("timeline") - .accept(MediaType.APPLICATION_JSON) - .type(MediaType.APPLICATION_JSON) - .post(ClientResponse.class, entities); - // verify the system data will not be exposed - // 1. No field specification - response = r.path("ws").path("v1").path("timeline") - .path("test type 3").path("test id 3") - .accept(MediaType.APPLICATION_JSON) - .get(ClientResponse.class); - assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); - entity = response.getEntity(TimelineEntity.class); - Assert.assertNull(entity.getPrimaryFilters().get( - TimelineStore.SystemFilter.ENTITY_OWNER.toString())); - // 2. other field - response = r.path("ws").path("v1").path("timeline") - .path("test type 3").path("test id 3") - .queryParam("fields", "relatedentities") - .accept(MediaType.APPLICATION_JSON) - .get(ClientResponse.class); - assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); - entity = response.getEntity(TimelineEntity.class); - Assert.assertNull(entity.getPrimaryFilters().get( - TimelineStore.SystemFilter.ENTITY_OWNER.toString())); - // 3. primaryfilters field - response = r.path("ws").path("v1").path("timeline") - .path("test type 3").path("test id 3") - .queryParam("fields", "primaryfilters") - .accept(MediaType.APPLICATION_JSON) - .get(ClientResponse.class); - assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); - entity = response.getEntity(TimelineEntity.class); - Assert.assertNull(entity.getPrimaryFilters().get( - TimelineStore.SystemFilter.ENTITY_OWNER.toString())); - - // get entity with other user - remoteUser = "other"; - response = r.path("ws").path("v1").path("timeline") - .path("test type 3").path("test id 3") - .accept(MediaType.APPLICATION_JSON) - .get(ClientResponse.class); - assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); - assertEquals(ClientResponse.Status.NOT_FOUND, - response.getClientResponseStatus()); - } finally { - timelineACLsManager.setAdminACLsManager(oldAdminACLsManager); - remoteUser = null; - } - } - - @Test - public void testGetEntitiesWithYarnACLsEnabled() { - AdminACLsManager oldAdminACLsManager = - timelineACLsManager.setAdminACLsManager(adminACLsManager); - remoteUser = "tester"; - try { - TimelineEntities entities = new TimelineEntities(); - TimelineEntity entity = new TimelineEntity(); - entity.setEntityId("test id 4"); - entity.setEntityType("test type 4"); - entity.setStartTime(System.currentTimeMillis()); - entities.addEntity(entity); - WebResource r = resource(); - ClientResponse response = r.path("ws").path("v1").path("timeline") - .accept(MediaType.APPLICATION_JSON) - .type(MediaType.APPLICATION_JSON) - .post(ClientResponse.class, entities); - - remoteUser = "other"; - entities = new TimelineEntities(); - entity = new TimelineEntity(); - entity.setEntityId("test id 5"); - entity.setEntityType("test type 4"); - entity.setStartTime(System.currentTimeMillis()); - entities.addEntity(entity); - r = resource(); - response = r.path("ws").path("v1").path("timeline") - .accept(MediaType.APPLICATION_JSON) - .type(MediaType.APPLICATION_JSON) - .post(ClientResponse.class, entities); - - response = r.path("ws").path("v1").path("timeline") - .path("test type 4") - .accept(MediaType.APPLICATION_JSON) - .get(ClientResponse.class); - assertEquals(MediaType.APPLICATION_JSON_TYPE, 
response.getType()); - entities = response.getEntity(TimelineEntities.class); - assertEquals(1, entities.getEntities().size()); - assertEquals("test type 4", entities.getEntities().get(0).getEntityType()); - assertEquals("test id 5", entities.getEntities().get(0).getEntityId()); - } finally { - timelineACLsManager.setAdminACLsManager(oldAdminACLsManager); - remoteUser = null; - } - } - - @Test - public void testGetEventsWithYarnACLsEnabled() { - AdminACLsManager oldAdminACLsManager = - timelineACLsManager.setAdminACLsManager(adminACLsManager); - remoteUser = "tester"; - try { - TimelineEntities entities = new TimelineEntities(); - TimelineEntity entity = new TimelineEntity(); - entity.setEntityId("test id 5"); - entity.setEntityType("test type 5"); - entity.setStartTime(System.currentTimeMillis()); - TimelineEvent event = new TimelineEvent(); - event.setEventType("event type 1"); - event.setTimestamp(System.currentTimeMillis()); - entity.addEvent(event); - entities.addEntity(entity); - WebResource r = resource(); - ClientResponse response = r.path("ws").path("v1").path("timeline") - .accept(MediaType.APPLICATION_JSON) - .type(MediaType.APPLICATION_JSON) - .post(ClientResponse.class, entities); - - remoteUser = "other"; - entities = new TimelineEntities(); - entity = new TimelineEntity(); - entity.setEntityId("test id 6"); - entity.setEntityType("test type 5"); - entity.setStartTime(System.currentTimeMillis()); - event = new TimelineEvent(); - event.setEventType("event type 2"); - event.setTimestamp(System.currentTimeMillis()); - entity.addEvent(event); - entities.addEntity(entity); - r = resource(); - response = r.path("ws").path("v1").path("timeline") - .accept(MediaType.APPLICATION_JSON) - .type(MediaType.APPLICATION_JSON) - .post(ClientResponse.class, entities); - - response = r.path("ws").path("v1").path("timeline") - .path("test type 5").path("events") - .queryParam("entityId", "test id 5,test id 6") - .accept(MediaType.APPLICATION_JSON) - .get(ClientResponse.class); - assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); - TimelineEvents events = response.getEntity(TimelineEvents.class); - assertEquals(1, events.getAllEvents().size()); - assertEquals("test id 6", events.getAllEvents().get(0).getEntityId()); - } finally { - timelineACLsManager.setAdminACLsManager(oldAdminACLsManager); - remoteUser = null; - } - } - - @Singleton - private static class TestFilter implements Filter { - - @Override - public void init(FilterConfig filterConfig) throws ServletException { - } - - @Override - public void doFilter(ServletRequest request, ServletResponse response, - FilterChain chain) throws IOException, ServletException { - if (request instanceof HttpServletRequest) { - request = - new TestHttpServletRequestWrapper((HttpServletRequest) request); - } - chain.doFilter(request, response); - } - - @Override - public void destroy() { - } - - } - - private static class TestHttpServletRequestWrapper extends HttpServletRequestWrapper { - - public TestHttpServletRequestWrapper(HttpServletRequest request) { - super(request); - } - - @Override - public String getRemoteUser() { - return TestTimelineWebServices.remoteUser; - } - - } -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/TestGenericObjectMapper.java 
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/TestGenericObjectMapper.java new file mode 100644 index 0000000..ab09231 --- /dev/null +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/TestGenericObjectMapper.java @@ -0,0 +1,102 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.yarn.server.timeline; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.classification.InterfaceStability; +import org.apache.hadoop.io.WritableComparator; +import org.apache.hadoop.yarn.server.timeline.GenericObjectMapper; +import org.junit.Test; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import static org.junit.Assert.assertEquals; + +@InterfaceAudience.Private +@InterfaceStability.Unstable +public class TestGenericObjectMapper { + + @Test + public void testEncoding() { + testEncoding(Long.MAX_VALUE); + testEncoding(Long.MIN_VALUE); + testEncoding(0l); + testEncoding(128l); + testEncoding(256l); + testEncoding(512l); + testEncoding(-256l); + } + + private static void testEncoding(long l) { + byte[] b = GenericObjectMapper.writeReverseOrderedLong(l); + assertEquals("error decoding", l, + GenericObjectMapper.readReverseOrderedLong(b, 0)); + byte[] buf = new byte[16]; + System.arraycopy(b, 0, buf, 5, 8); + assertEquals("error decoding at offset", l, + GenericObjectMapper.readReverseOrderedLong(buf, 5)); + if (l > Long.MIN_VALUE) { + byte[] a = GenericObjectMapper.writeReverseOrderedLong(l-1); + assertEquals("error preserving ordering", 1, + WritableComparator.compareBytes(a, 0, a.length, b, 0, b.length)); + } + if (l < Long.MAX_VALUE) { + byte[] c = GenericObjectMapper.writeReverseOrderedLong(l+1); + assertEquals("error preserving ordering", 1, + WritableComparator.compareBytes(b, 0, b.length, c, 0, c.length)); + } + } + + private static void verify(Object o) throws IOException { + assertEquals(o, GenericObjectMapper.read(GenericObjectMapper.write(o))); + } + + @Test + public void testValueTypes() throws IOException { + verify(Integer.MAX_VALUE); + verify(Integer.MIN_VALUE); + assertEquals(Integer.MAX_VALUE, GenericObjectMapper.read( + GenericObjectMapper.write((long) Integer.MAX_VALUE))); + assertEquals(Integer.MIN_VALUE, GenericObjectMapper.read( + GenericObjectMapper.write((long) Integer.MIN_VALUE))); + verify((long)Integer.MAX_VALUE + 1l); + verify((long)Integer.MIN_VALUE - 1l); + + verify(Long.MAX_VALUE); + verify(Long.MIN_VALUE); + + assertEquals(42, 
GenericObjectMapper.read(GenericObjectMapper.write(42l))); + verify(42); + verify(1.23); + verify("abc"); + verify(true); + List list = new ArrayList(); + list.add("123"); + list.add("abc"); + verify(list); + Map map = new HashMap(); + map.put("k1","v1"); + map.put("k2","v2"); + verify(map); + } + +} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/TestLeveldbTimelineStore.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/TestLeveldbTimelineStore.java new file mode 100644 index 0000000..2adfeaf --- /dev/null +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/TestLeveldbTimelineStore.java @@ -0,0 +1,267 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.yarn.server.timeline; + +import static org.apache.hadoop.yarn.server.timeline.GenericObjectMapper.writeReverseOrderedLong; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; + +import java.io.File; +import java.io.IOException; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.classification.InterfaceStability; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FileContext; +import org.apache.hadoop.fs.FileStatus; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.io.IOUtils; +import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities; +import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity; +import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse; +import org.apache.hadoop.yarn.conf.YarnConfiguration; +import org.apache.hadoop.yarn.server.timeline.LeveldbTimelineStore; +import org.apache.hadoop.yarn.server.timeline.NameValuePair; +import org.iq80.leveldb.DBIterator; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +@InterfaceAudience.Private +@InterfaceStability.Unstable +public class TestLeveldbTimelineStore extends TimelineStoreTestUtils { + private FileContext fsContext; + private File fsPath; + + @Before + public void setup() throws Exception { + fsContext = FileContext.getLocalFSFileContext(); + Configuration conf = new YarnConfiguration(); + fsPath = new File("target", this.getClass().getSimpleName() + + "-tmpDir").getAbsoluteFile(); + fsContext.delete(new Path(fsPath.getAbsolutePath()), true); + 
conf.set(YarnConfiguration.TIMELINE_SERVICE_LEVELDB_PATH, + fsPath.getAbsolutePath()); + conf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_TTL_ENABLE, false); + store = new LeveldbTimelineStore(); + store.init(conf); + store.start(); + loadTestData(); + loadVerificationData(); + } + + @After + public void tearDown() throws Exception { + store.stop(); + fsContext.delete(new Path(fsPath.getAbsolutePath()), true); + } + + @Test + public void testRootDirPermission() throws IOException { + FileSystem fs = FileSystem.getLocal(new YarnConfiguration()); + FileStatus file = fs.getFileStatus( + new Path(fsPath.getAbsolutePath(), LeveldbTimelineStore.FILENAME)); + assertNotNull(file); + assertEquals(LeveldbTimelineStore.LEVELDB_DIR_UMASK, file.getPermission()); + } + + @Test + public void testGetSingleEntity() throws IOException { + super.testGetSingleEntity(); + ((LeveldbTimelineStore)store).clearStartTimeCache(); + super.testGetSingleEntity(); + loadTestData(); + } + + @Test + public void testGetEntities() throws IOException { + super.testGetEntities(); + } + + @Test + public void testGetEntitiesWithFromId() throws IOException { + super.testGetEntitiesWithFromId(); + } + + @Test + public void testGetEntitiesWithFromTs() throws IOException { + super.testGetEntitiesWithFromTs(); + } + + @Test + public void testGetEntitiesWithPrimaryFilters() throws IOException { + super.testGetEntitiesWithPrimaryFilters(); + } + + @Test + public void testGetEntitiesWithSecondaryFilters() throws IOException { + super.testGetEntitiesWithSecondaryFilters(); + } + + @Test + public void testGetEvents() throws IOException { + super.testGetEvents(); + } + + @Test + public void testCacheSizes() { + Configuration conf = new Configuration(); + assertEquals(10000, LeveldbTimelineStore.getStartTimeReadCacheSize(conf)); + assertEquals(10000, LeveldbTimelineStore.getStartTimeWriteCacheSize(conf)); + conf.setInt( + YarnConfiguration.TIMELINE_SERVICE_LEVELDB_START_TIME_READ_CACHE_SIZE, + 10001); + assertEquals(10001, LeveldbTimelineStore.getStartTimeReadCacheSize(conf)); + conf = new Configuration(); + conf.setInt( + YarnConfiguration.TIMELINE_SERVICE_LEVELDB_START_TIME_WRITE_CACHE_SIZE, + 10002); + assertEquals(10002, LeveldbTimelineStore.getStartTimeWriteCacheSize(conf)); + } + + private boolean deleteNextEntity(String entityType, byte[] ts) + throws IOException, InterruptedException { + DBIterator iterator = null; + DBIterator pfIterator = null; + try { + iterator = ((LeveldbTimelineStore)store).getDbIterator(false); + pfIterator = ((LeveldbTimelineStore)store).getDbIterator(false); + return ((LeveldbTimelineStore)store).deleteNextEntity(entityType, ts, + iterator, pfIterator, false); + } finally { + IOUtils.cleanup(null, iterator, pfIterator); + } + } + + @Test + public void testGetEntityTypes() throws IOException { + List entityTypes = ((LeveldbTimelineStore)store).getEntityTypes(); + assertEquals(4, entityTypes.size()); + assertEquals(entityType1, entityTypes.get(0)); + assertEquals(entityType2, entityTypes.get(1)); + assertEquals(entityType4, entityTypes.get(2)); + assertEquals(entityType5, entityTypes.get(3)); + } + + @Test + public void testDeleteEntities() throws IOException, InterruptedException { + assertEquals(2, getEntities("type_1").size()); + assertEquals(1, getEntities("type_2").size()); + + assertEquals(false, deleteNextEntity(entityType1, + writeReverseOrderedLong(122l))); + assertEquals(2, getEntities("type_1").size()); + assertEquals(1, getEntities("type_2").size()); + + assertEquals(true, 
deleteNextEntity(entityType1, + writeReverseOrderedLong(123l))); + List<TimelineEntity> entities = getEntities("type_2"); + assertEquals(1, entities.size()); + verifyEntityInfo(entityId2, entityType2, events2, Collections.singletonMap( + entityType1, Collections.singleton(entityId1b)), EMPTY_PRIMARY_FILTERS, + EMPTY_MAP, entities.get(0)); + entities = getEntitiesWithPrimaryFilter("type_1", userFilter); + assertEquals(1, entities.size()); + verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, entities.get(0)); + + ((LeveldbTimelineStore)store).discardOldEntities(-123l); + assertEquals(1, getEntities("type_1").size()); + assertEquals(0, getEntities("type_2").size()); + assertEquals(3, ((LeveldbTimelineStore)store).getEntityTypes().size()); + + ((LeveldbTimelineStore)store).discardOldEntities(123l); + assertEquals(0, getEntities("type_1").size()); + assertEquals(0, getEntities("type_2").size()); + assertEquals(0, ((LeveldbTimelineStore)store).getEntityTypes().size()); + assertEquals(0, getEntitiesWithPrimaryFilter("type_1", userFilter).size()); + } + + @Test + public void testDeleteEntitiesPrimaryFilters() + throws IOException, InterruptedException { + Map<String, Set<Object>> primaryFilter = + Collections.singletonMap("user", Collections.singleton( + (Object) "otheruser")); + TimelineEntities atsEntities = new TimelineEntities(); + atsEntities.setEntities(Collections.singletonList(createEntity(entityId1b, + entityType1, 789l, Collections.singletonList(ev2), null, primaryFilter, + null))); + TimelinePutResponse response = store.put(atsEntities); + assertEquals(0, response.getErrors().size()); + + NameValuePair pfPair = new NameValuePair("user", "otheruser"); + List<TimelineEntity> entities = getEntitiesWithPrimaryFilter("type_1", + pfPair); + assertEquals(1, entities.size()); + verifyEntityInfo(entityId1b, entityType1, Collections.singletonList(ev2), + EMPTY_REL_ENTITIES, primaryFilter, EMPTY_MAP, entities.get(0)); + + entities = getEntitiesWithPrimaryFilter("type_1", userFilter); + assertEquals(2, entities.size()); + verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, entities.get(0)); + verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, entities.get(1)); + + ((LeveldbTimelineStore)store).discardOldEntities(-123l); + assertEquals(1, getEntitiesWithPrimaryFilter("type_1", pfPair).size()); + assertEquals(2, getEntitiesWithPrimaryFilter("type_1", userFilter).size()); + + ((LeveldbTimelineStore)store).discardOldEntities(123l); + assertEquals(0, getEntities("type_1").size()); + assertEquals(0, getEntities("type_2").size()); + assertEquals(0, ((LeveldbTimelineStore)store).getEntityTypes().size()); + + assertEquals(0, getEntitiesWithPrimaryFilter("type_1", pfPair).size()); + assertEquals(0, getEntitiesWithPrimaryFilter("type_1", userFilter).size()); + } + + @Test + public void testFromTsWithDeletion() + throws IOException, InterruptedException { + long l = System.currentTimeMillis(); + assertEquals(2, getEntitiesFromTs("type_1", l).size()); + assertEquals(1, getEntitiesFromTs("type_2", l).size()); + assertEquals(2, getEntitiesFromTsWithPrimaryFilter("type_1", userFilter, + l).size()); + ((LeveldbTimelineStore)store).discardOldEntities(123l); + assertEquals(0, getEntitiesFromTs("type_1", l).size()); + assertEquals(0, getEntitiesFromTs("type_2", l).size()); + assertEquals(0, getEntitiesFromTsWithPrimaryFilter("type_1", userFilter, + l).size()); + assertEquals(0, getEntities("type_1").size()); + assertEquals(0, 
getEntities("type_2").size()); + assertEquals(0, getEntitiesFromTsWithPrimaryFilter("type_1", userFilter, + l).size()); + loadTestData(); + assertEquals(0, getEntitiesFromTs("type_1", l).size()); + assertEquals(0, getEntitiesFromTs("type_2", l).size()); + assertEquals(0, getEntitiesFromTsWithPrimaryFilter("type_1", userFilter, + l).size()); + assertEquals(2, getEntities("type_1").size()); + assertEquals(1, getEntities("type_2").size()); + assertEquals(2, getEntitiesWithPrimaryFilter("type_1", userFilter).size()); + } + +} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/TestMemoryTimelineStore.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/TestMemoryTimelineStore.java new file mode 100644 index 0000000..1953442 --- /dev/null +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/TestMemoryTimelineStore.java @@ -0,0 +1,85 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.yarn.server.timeline; + +import org.apache.hadoop.yarn.conf.YarnConfiguration; +import org.apache.hadoop.yarn.server.timeline.MemoryTimelineStore; +import org.apache.hadoop.yarn.server.timeline.TimelineStore; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import java.io.IOException; + +public class TestMemoryTimelineStore extends TimelineStoreTestUtils { + + @Before + public void setup() throws Exception { + store = new MemoryTimelineStore(); + store.init(new YarnConfiguration()); + store.start(); + loadTestData(); + loadVerificationData(); + } + + @After + public void tearDown() throws Exception { + store.stop(); + } + + public TimelineStore getTimelineStore() { + return store; + } + + @Test + public void testGetSingleEntity() throws IOException { + super.testGetSingleEntity(); + } + + @Test + public void testGetEntities() throws IOException { + super.testGetEntities(); + } + + @Test + public void testGetEntitiesWithFromId() throws IOException { + super.testGetEntitiesWithFromId(); + } + + @Test + public void testGetEntitiesWithFromTs() throws IOException { + super.testGetEntitiesWithFromTs(); + } + + @Test + public void testGetEntitiesWithPrimaryFilters() throws IOException { + super.testGetEntitiesWithPrimaryFilters(); + } + + @Test + public void testGetEntitiesWithSecondaryFilters() throws IOException { + super.testGetEntitiesWithSecondaryFilters(); + } + + @Test + public void testGetEvents() throws IOException { + super.testGetEvents(); + } + +} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/TimelineStoreTestUtils.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/TimelineStoreTestUtils.java new file mode 100644 index 0000000..e8a6d83 --- /dev/null +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/TimelineStoreTestUtils.java @@ -0,0 +1,791 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.yarn.server.timeline; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.EnumSet; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; +import java.util.SortedSet; +import java.util.TreeSet; + +import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities; +import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity; +import org.apache.hadoop.yarn.api.records.timeline.TimelineEvent; +import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse; +import org.apache.hadoop.yarn.api.records.timeline.TimelineEvents.EventsOfOneEntity; +import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse.TimelinePutError; +import org.apache.hadoop.yarn.server.timeline.NameValuePair; +import org.apache.hadoop.yarn.server.timeline.TimelineStore; +import org.apache.hadoop.yarn.server.timeline.TimelineReader.Field; + +public class TimelineStoreTestUtils { + + protected static final List<TimelineEvent> EMPTY_EVENTS = + Collections.emptyList(); + protected static final Map<String, Object> EMPTY_MAP = + Collections.emptyMap(); + protected static final Map<String, Set<Object>> EMPTY_PRIMARY_FILTERS = + Collections.emptyMap(); + protected static final Map<String, Set<String>> EMPTY_REL_ENTITIES = + Collections.emptyMap(); + + protected TimelineStore store; + protected String entityId1; + protected String entityType1; + protected String entityId1b; + protected String entityId2; + protected String entityType2; + protected String entityId4; + protected String entityType4; + protected String entityId5; + protected String entityType5; + protected Map<String, Set<Object>> primaryFilters; + protected Map<String, Object> secondaryFilters; + protected Map<String, Object> allFilters; + protected Map<String, Object> otherInfo; + protected Map<String, Set<String>> relEntityMap; + protected Map<String, Set<String>> relEntityMap2; + protected NameValuePair userFilter; + protected NameValuePair numericFilter1; + protected NameValuePair numericFilter2; + protected NameValuePair numericFilter3; + protected Collection<NameValuePair> goodTestingFilters; + protected Collection<NameValuePair> badTestingFilters; + protected TimelineEvent ev1; + protected TimelineEvent ev2; + protected TimelineEvent ev3; + protected TimelineEvent ev4; + protected Map<String, Object> eventInfo; + protected List<TimelineEvent> events1; + protected List<TimelineEvent> events2; + protected long beforeTs; + + /** + * Load test data into the given store + */ + protected void loadTestData() throws IOException { + beforeTs = System.currentTimeMillis()-1; + TimelineEntities entities = new TimelineEntities(); + Map<String, Set<Object>> primaryFilters = + new HashMap<String, Set<Object>>(); + Set<Object> l1 = new HashSet<Object>(); + l1.add("username"); + Set<Object> l2 = new HashSet<Object>(); + l2.add((long)Integer.MAX_VALUE); + Set<Object> l3 = new HashSet<Object>(); + l3.add("123abc"); + Set<Object> l4 = new HashSet<Object>(); + l4.add((long)Integer.MAX_VALUE + 1l); + primaryFilters.put("user", l1); + primaryFilters.put("appname", l2); + primaryFilters.put("other", l3); + primaryFilters.put("long", l4); + Map<String, Object> secondaryFilters = new HashMap<String, Object>(); + secondaryFilters.put("startTime", 123456l); + secondaryFilters.put("status", "RUNNING"); + Map<String, Object> otherInfo1 = new HashMap<String, Object>(); + otherInfo1.put("info1", "val1"); + otherInfo1.putAll(secondaryFilters); + + String entityId1 = "id_1"; + String entityType1 = "type_1"; + String entityId1b = "id_2"; + String entityId2 = "id_2"; + String entityType2 = "type_2"; + String entityId4 = "id_4"; + String 
entityType4 = "type_4"; + String entityId5 = "id_5"; + String entityType5 = "type_5"; + + Map<String, Set<String>> relatedEntities = + new HashMap<String, Set<String>>(); + relatedEntities.put(entityType2, Collections.singleton(entityId2)); + + TimelineEvent ev3 = createEvent(789l, "launch_event", null); + TimelineEvent ev4 = createEvent(-123l, "init_event", null); + List<TimelineEvent> events = new ArrayList<TimelineEvent>(); + events.add(ev3); + events.add(ev4); + entities.setEntities(Collections.singletonList(createEntity(entityId2, + entityType2, null, events, null, null, null))); + TimelinePutResponse response = store.put(entities); + assertEquals(0, response.getErrors().size()); + + TimelineEvent ev1 = createEvent(123l, "start_event", null); + entities.setEntities(Collections.singletonList(createEntity(entityId1, + entityType1, 123l, Collections.singletonList(ev1), + relatedEntities, primaryFilters, otherInfo1))); + response = store.put(entities); + assertEquals(0, response.getErrors().size()); + entities.setEntities(Collections.singletonList(createEntity(entityId1b, + entityType1, null, Collections.singletonList(ev1), relatedEntities, + primaryFilters, otherInfo1))); + response = store.put(entities); + assertEquals(0, response.getErrors().size()); + + Map<String, Object> eventInfo = new HashMap<String, Object>(); + eventInfo.put("event info 1", "val1"); + TimelineEvent ev2 = createEvent(456l, "end_event", eventInfo); + Map<String, Object> otherInfo2 = new HashMap<String, Object>(); + otherInfo2.put("info2", "val2"); + entities.setEntities(Collections.singletonList(createEntity(entityId1, + entityType1, null, Collections.singletonList(ev2), null, + primaryFilters, otherInfo2))); + response = store.put(entities); + assertEquals(0, response.getErrors().size()); + entities.setEntities(Collections.singletonList(createEntity(entityId1b, + entityType1, 789l, Collections.singletonList(ev2), null, + primaryFilters, otherInfo2))); + response = store.put(entities); + assertEquals(0, response.getErrors().size()); + + entities.setEntities(Collections.singletonList(createEntity( + "badentityid", "badentity", null, null, null, null, otherInfo1))); + response = store.put(entities); + assertEquals(1, response.getErrors().size()); + TimelinePutError error = response.getErrors().get(0); + assertEquals("badentityid", error.getEntityId()); + assertEquals("badentity", error.getEntityType()); + assertEquals(TimelinePutError.NO_START_TIME, error.getErrorCode()); + + relatedEntities.clear(); + relatedEntities.put(entityType5, Collections.singleton(entityId5)); + entities.setEntities(Collections.singletonList(createEntity(entityId4, + entityType4, 42l, null, relatedEntities, null, null))); + response = store.put(entities); + assertEquals(0, response.getErrors().size()); + } + + /** + * Load verification data + */ + protected void loadVerificationData() throws Exception { + userFilter = new NameValuePair("user", "username"); + numericFilter1 = new NameValuePair("appname", Integer.MAX_VALUE); + numericFilter2 = new NameValuePair("long", (long)Integer.MAX_VALUE + 1l); + numericFilter3 = new NameValuePair("other", "123abc"); + goodTestingFilters = new ArrayList<NameValuePair>(); + goodTestingFilters.add(new NameValuePair("appname", Integer.MAX_VALUE)); + goodTestingFilters.add(new NameValuePair("status", "RUNNING")); + badTestingFilters = new ArrayList<NameValuePair>(); + badTestingFilters.add(new NameValuePair("appname", Integer.MAX_VALUE)); + badTestingFilters.add(new NameValuePair("status", "FINISHED")); + + primaryFilters = new HashMap<String, Set<Object>>(); + Set<Object> l1 = new HashSet<Object>(); + l1.add("username"); + Set<Object> l2 = new HashSet<Object>(); + l2.add(Integer.MAX_VALUE); + Set<Object> l3 = new HashSet<Object>(); + 
l3.add("123abc"); + Set<Object> l4 = new HashSet<Object>(); + l4.add((long)Integer.MAX_VALUE + 1l); + primaryFilters.put("user", l1); + primaryFilters.put("appname", l2); + primaryFilters.put("other", l3); + primaryFilters.put("long", l4); + secondaryFilters = new HashMap<String, Object>(); + secondaryFilters.put("startTime", 123456); + secondaryFilters.put("status", "RUNNING"); + allFilters = new HashMap<String, Object>(); + allFilters.putAll(secondaryFilters); + for (Entry<String, Set<Object>> pf : primaryFilters.entrySet()) { + for (Object o : pf.getValue()) { + allFilters.put(pf.getKey(), o); + } + } + otherInfo = new HashMap<String, Object>(); + otherInfo.put("info1", "val1"); + otherInfo.put("info2", "val2"); + otherInfo.putAll(secondaryFilters); + + entityId1 = "id_1"; + entityType1 = "type_1"; + entityId1b = "id_2"; + entityId2 = "id_2"; + entityType2 = "type_2"; + entityId4 = "id_4"; + entityType4 = "type_4"; + entityId5 = "id_5"; + entityType5 = "type_5"; + + ev1 = createEvent(123l, "start_event", null); + + eventInfo = new HashMap<String, Object>(); + eventInfo.put("event info 1", "val1"); + ev2 = createEvent(456l, "end_event", eventInfo); + events1 = new ArrayList<TimelineEvent>(); + events1.add(ev2); + events1.add(ev1); + + relEntityMap = + new HashMap<String, Set<String>>(); + Set<String> ids = new HashSet<String>(); + ids.add(entityId1); + ids.add(entityId1b); + relEntityMap.put(entityType1, ids); + + relEntityMap2 = + new HashMap<String, Set<String>>(); + relEntityMap2.put(entityType4, Collections.singleton(entityId4)); + + ev3 = createEvent(789l, "launch_event", null); + ev4 = createEvent(-123l, "init_event", null); + events2 = new ArrayList<TimelineEvent>(); + events2.add(ev3); + events2.add(ev4); + } + + public void testGetSingleEntity() throws IOException { + // test getting entity info + verifyEntityInfo(null, null, null, null, null, null, + store.getEntity("id_1", "type_2", EnumSet.allOf(Field.class))); + + verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, 123l, store.getEntity(entityId1, + entityType1, EnumSet.allOf(Field.class))); + + verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, 123l, store.getEntity(entityId1b, + entityType1, EnumSet.allOf(Field.class))); + + verifyEntityInfo(entityId2, entityType2, events2, relEntityMap, + EMPTY_PRIMARY_FILTERS, EMPTY_MAP, -123l, store.getEntity(entityId2, + entityType2, EnumSet.allOf(Field.class))); + + verifyEntityInfo(entityId4, entityType4, EMPTY_EVENTS, EMPTY_REL_ENTITIES, + EMPTY_PRIMARY_FILTERS, EMPTY_MAP, 42l, store.getEntity(entityId4, + entityType4, EnumSet.allOf(Field.class))); + + verifyEntityInfo(entityId5, entityType5, EMPTY_EVENTS, relEntityMap2, + EMPTY_PRIMARY_FILTERS, EMPTY_MAP, 42l, store.getEntity(entityId5, + entityType5, EnumSet.allOf(Field.class))); + + // test getting single fields + verifyEntityInfo(entityId1, entityType1, events1, null, null, null, + store.getEntity(entityId1, entityType1, EnumSet.of(Field.EVENTS))); + + verifyEntityInfo(entityId1, entityType1, Collections.singletonList(ev2), + null, null, null, store.getEntity(entityId1, entityType1, + EnumSet.of(Field.LAST_EVENT_ONLY))); + + verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, store.getEntity(entityId1b, entityType1, + null)); + + verifyEntityInfo(entityId1, entityType1, null, null, primaryFilters, null, + store.getEntity(entityId1, entityType1, + EnumSet.of(Field.PRIMARY_FILTERS))); + + verifyEntityInfo(entityId1, entityType1, null, null, null, otherInfo, + store.getEntity(entityId1, entityType1, EnumSet.of(Field.OTHER_INFO))); + + verifyEntityInfo(entityId2, 
entityType2, null, relEntityMap, null, null, + store.getEntity(entityId2, entityType2, + EnumSet.of(Field.RELATED_ENTITIES))); + } + + protected List getEntities(String entityType) + throws IOException { + return store.getEntities(entityType, null, null, null, null, null, + null, null, null).getEntities(); + } + + protected List getEntitiesWithPrimaryFilter( + String entityType, NameValuePair primaryFilter) throws IOException { + return store.getEntities(entityType, null, null, null, null, null, + primaryFilter, null, null).getEntities(); + } + + protected List getEntitiesFromId(String entityType, + String fromId) throws IOException { + return store.getEntities(entityType, null, null, null, fromId, null, + null, null, null).getEntities(); + } + + protected List getEntitiesFromTs(String entityType, + long fromTs) throws IOException { + return store.getEntities(entityType, null, null, null, null, fromTs, + null, null, null).getEntities(); + } + + protected List getEntitiesFromIdWithPrimaryFilter( + String entityType, NameValuePair primaryFilter, String fromId) + throws IOException { + return store.getEntities(entityType, null, null, null, fromId, null, + primaryFilter, null, null).getEntities(); + } + + protected List getEntitiesFromTsWithPrimaryFilter( + String entityType, NameValuePair primaryFilter, long fromTs) + throws IOException { + return store.getEntities(entityType, null, null, null, null, fromTs, + primaryFilter, null, null).getEntities(); + } + + protected List getEntitiesFromIdWithWindow(String entityType, + Long windowEnd, String fromId) throws IOException { + return store.getEntities(entityType, null, null, windowEnd, fromId, null, + null, null, null).getEntities(); + } + + protected List getEntitiesFromIdWithPrimaryFilterAndWindow( + String entityType, Long windowEnd, String fromId, + NameValuePair primaryFilter) throws IOException { + return store.getEntities(entityType, null, null, windowEnd, fromId, null, + primaryFilter, null, null).getEntities(); + } + + protected List getEntitiesWithFilters(String entityType, + NameValuePair primaryFilter, Collection secondaryFilters) + throws IOException { + return store.getEntities(entityType, null, null, null, null, null, + primaryFilter, secondaryFilters, null).getEntities(); + } + + protected List getEntities(String entityType, Long limit, + Long windowStart, Long windowEnd, NameValuePair primaryFilter, + EnumSet fields) throws IOException { + return store.getEntities(entityType, limit, windowStart, windowEnd, null, + null, primaryFilter, null, fields).getEntities(); + } + + public void testGetEntities() throws IOException { + // test getting entities + assertEquals("nonzero entities size for nonexistent type", 0, + getEntities("type_0").size()); + assertEquals("nonzero entities size for nonexistent type", 0, + getEntities("type_3").size()); + assertEquals("nonzero entities size for nonexistent type", 0, + getEntities("type_6").size()); + assertEquals("nonzero entities size for nonexistent type", 0, + getEntitiesWithPrimaryFilter("type_0", userFilter).size()); + assertEquals("nonzero entities size for nonexistent type", 0, + getEntitiesWithPrimaryFilter("type_3", userFilter).size()); + assertEquals("nonzero entities size for nonexistent type", 0, + getEntitiesWithPrimaryFilter("type_6", userFilter).size()); + + List entities = getEntities("type_1"); + assertEquals(2, entities.size()); + verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, entities.get(0)); + verifyEntityInfo(entityId1b, 
entityType1, events1, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, entities.get(1)); + + entities = getEntities("type_2"); + assertEquals(1, entities.size()); + verifyEntityInfo(entityId2, entityType2, events2, relEntityMap, + EMPTY_PRIMARY_FILTERS, EMPTY_MAP, entities.get(0)); + + entities = getEntities("type_1", 1l, null, null, null, + EnumSet.allOf(Field.class)); + assertEquals(1, entities.size()); + verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, entities.get(0)); + + entities = getEntities("type_1", 1l, 0l, null, null, + EnumSet.allOf(Field.class)); + assertEquals(1, entities.size()); + verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, entities.get(0)); + + entities = getEntities("type_1", null, 234l, null, null, + EnumSet.allOf(Field.class)); + assertEquals(0, entities.size()); + + entities = getEntities("type_1", null, 123l, null, null, + EnumSet.allOf(Field.class)); + assertEquals(0, entities.size()); + + entities = getEntities("type_1", null, 234l, 345l, null, + EnumSet.allOf(Field.class)); + assertEquals(0, entities.size()); + + entities = getEntities("type_1", null, null, 345l, null, + EnumSet.allOf(Field.class)); + assertEquals(2, entities.size()); + verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, entities.get(0)); + verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, entities.get(1)); + + entities = getEntities("type_1", null, null, 123l, null, + EnumSet.allOf(Field.class)); + assertEquals(2, entities.size()); + verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, entities.get(0)); + verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, entities.get(1)); + } + + public void testGetEntitiesWithFromId() throws IOException { + List entities = getEntitiesFromId("type_1", entityId1); + assertEquals(2, entities.size()); + verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, entities.get(0)); + verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, entities.get(1)); + + entities = getEntitiesFromId("type_1", entityId1b); + assertEquals(1, entities.size()); + verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, entities.get(0)); + + entities = getEntitiesFromIdWithWindow("type_1", 0l, entityId1); + assertEquals(0, entities.size()); + + entities = getEntitiesFromId("type_2", "a"); + assertEquals(0, entities.size()); + + entities = getEntitiesFromId("type_2", entityId2); + assertEquals(1, entities.size()); + verifyEntityInfo(entityId2, entityType2, events2, relEntityMap, + EMPTY_PRIMARY_FILTERS, EMPTY_MAP, entities.get(0)); + + entities = getEntitiesFromIdWithWindow("type_2", -456l, null); + assertEquals(0, entities.size()); + + entities = getEntitiesFromIdWithWindow("type_2", -456l, "a"); + assertEquals(0, entities.size()); + + entities = getEntitiesFromIdWithWindow("type_2", 0l, null); + assertEquals(1, entities.size()); + + entities = getEntitiesFromIdWithWindow("type_2", 0l, entityId2); + assertEquals(1, entities.size()); + + // same tests with primary filters + entities = getEntitiesFromIdWithPrimaryFilter("type_1", userFilter, + entityId1); + assertEquals(2, entities.size()); + verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES, + 
primaryFilters, otherInfo, entities.get(0)); + verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, entities.get(1)); + + entities = getEntitiesFromIdWithPrimaryFilter("type_1", userFilter, + entityId1b); + assertEquals(1, entities.size()); + verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, entities.get(0)); + + entities = getEntitiesFromIdWithPrimaryFilterAndWindow("type_1", 0l, + entityId1, userFilter); + assertEquals(0, entities.size()); + + entities = getEntitiesFromIdWithPrimaryFilter("type_2", userFilter, "a"); + assertEquals(0, entities.size()); + } + + public void testGetEntitiesWithFromTs() throws IOException { + assertEquals(0, getEntitiesFromTs("type_1", beforeTs).size()); + assertEquals(0, getEntitiesFromTs("type_2", beforeTs).size()); + assertEquals(0, getEntitiesFromTsWithPrimaryFilter("type_1", userFilter, + beforeTs).size()); + long afterTs = System.currentTimeMillis(); + assertEquals(2, getEntitiesFromTs("type_1", afterTs).size()); + assertEquals(1, getEntitiesFromTs("type_2", afterTs).size()); + assertEquals(2, getEntitiesFromTsWithPrimaryFilter("type_1", userFilter, + afterTs).size()); + assertEquals(2, getEntities("type_1").size()); + assertEquals(1, getEntities("type_2").size()); + assertEquals(2, getEntitiesWithPrimaryFilter("type_1", userFilter).size()); + // check insert time is not overwritten + long beforeTs = this.beforeTs; + loadTestData(); + assertEquals(0, getEntitiesFromTs("type_1", beforeTs).size()); + assertEquals(0, getEntitiesFromTs("type_2", beforeTs).size()); + assertEquals(0, getEntitiesFromTsWithPrimaryFilter("type_1", userFilter, + beforeTs).size()); + assertEquals(2, getEntitiesFromTs("type_1", afterTs).size()); + assertEquals(1, getEntitiesFromTs("type_2", afterTs).size()); + assertEquals(2, getEntitiesFromTsWithPrimaryFilter("type_1", userFilter, + afterTs).size()); + } + + public void testGetEntitiesWithPrimaryFilters() throws IOException { + // test using primary filter + assertEquals("nonzero entities size for primary filter", 0, + getEntitiesWithPrimaryFilter("type_1", + new NameValuePair("none", "none")).size()); + assertEquals("nonzero entities size for primary filter", 0, + getEntitiesWithPrimaryFilter("type_2", + new NameValuePair("none", "none")).size()); + assertEquals("nonzero entities size for primary filter", 0, + getEntitiesWithPrimaryFilter("type_3", + new NameValuePair("none", "none")).size()); + + List entities = getEntitiesWithPrimaryFilter("type_1", + userFilter); + assertEquals(2, entities.size()); + verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, entities.get(0)); + verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, entities.get(1)); + + entities = getEntitiesWithPrimaryFilter("type_1", numericFilter1); + assertEquals(2, entities.size()); + verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, entities.get(0)); + verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, entities.get(1)); + + entities = getEntitiesWithPrimaryFilter("type_1", numericFilter2); + assertEquals(2, entities.size()); + verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, entities.get(0)); + verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, entities.get(1)); + + entities = 
getEntitiesWithPrimaryFilter("type_1", numericFilter3); + assertEquals(2, entities.size()); + verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, entities.get(0)); + verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, entities.get(1)); + + entities = getEntitiesWithPrimaryFilter("type_2", userFilter); + assertEquals(0, entities.size()); + + entities = getEntities("type_1", 1l, null, null, userFilter, null); + assertEquals(1, entities.size()); + verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, entities.get(0)); + + entities = getEntities("type_1", 1l, 0l, null, userFilter, null); + assertEquals(1, entities.size()); + verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, entities.get(0)); + + entities = getEntities("type_1", null, 234l, null, userFilter, null); + assertEquals(0, entities.size()); + + entities = getEntities("type_1", null, 234l, 345l, userFilter, null); + assertEquals(0, entities.size()); + + entities = getEntities("type_1", null, null, 345l, userFilter, null); + assertEquals(2, entities.size()); + verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, entities.get(0)); + verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, entities.get(1)); + } + + public void testGetEntitiesWithSecondaryFilters() throws IOException { + // test using secondary filter + List entities = getEntitiesWithFilters("type_1", null, + goodTestingFilters); + assertEquals(2, entities.size()); + verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, entities.get(0)); + verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, entities.get(1)); + + entities = getEntitiesWithFilters("type_1", userFilter, goodTestingFilters); + assertEquals(2, entities.size()); + verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, entities.get(0)); + verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, entities.get(1)); + + entities = getEntitiesWithFilters("type_1", null, + Collections.singleton(new NameValuePair("user", "none"))); + assertEquals(0, entities.size()); + + entities = getEntitiesWithFilters("type_1", null, badTestingFilters); + assertEquals(0, entities.size()); + + entities = getEntitiesWithFilters("type_1", userFilter, badTestingFilters); + assertEquals(0, entities.size()); + } + + public void testGetEvents() throws IOException { + // test getting entity timelines + SortedSet sortedSet = new TreeSet(); + sortedSet.add(entityId1); + List timelines = + store.getEntityTimelines(entityType1, sortedSet, null, null, + null, null).getAllEvents(); + assertEquals(1, timelines.size()); + verifyEntityTimeline(timelines.get(0), entityId1, entityType1, ev2, ev1); + + sortedSet.add(entityId1b); + timelines = store.getEntityTimelines(entityType1, sortedSet, null, + null, null, null).getAllEvents(); + assertEquals(2, timelines.size()); + verifyEntityTimeline(timelines.get(0), entityId1, entityType1, ev2, ev1); + verifyEntityTimeline(timelines.get(1), entityId1b, entityType1, ev2, ev1); + + timelines = store.getEntityTimelines(entityType1, sortedSet, 1l, + null, null, null).getAllEvents(); + assertEquals(2, timelines.size()); + verifyEntityTimeline(timelines.get(0), 
entityId1, entityType1, ev2); + verifyEntityTimeline(timelines.get(1), entityId1b, entityType1, ev2); + + timelines = store.getEntityTimelines(entityType1, sortedSet, null, + 345l, null, null).getAllEvents(); + assertEquals(2, timelines.size()); + verifyEntityTimeline(timelines.get(0), entityId1, entityType1, ev2); + verifyEntityTimeline(timelines.get(1), entityId1b, entityType1, ev2); + + timelines = store.getEntityTimelines(entityType1, sortedSet, null, + 123l, null, null).getAllEvents(); + assertEquals(2, timelines.size()); + verifyEntityTimeline(timelines.get(0), entityId1, entityType1, ev2); + verifyEntityTimeline(timelines.get(1), entityId1b, entityType1, ev2); + + timelines = store.getEntityTimelines(entityType1, sortedSet, null, + null, 345l, null).getAllEvents(); + assertEquals(2, timelines.size()); + verifyEntityTimeline(timelines.get(0), entityId1, entityType1, ev1); + verifyEntityTimeline(timelines.get(1), entityId1b, entityType1, ev1); + + timelines = store.getEntityTimelines(entityType1, sortedSet, null, + null, 123l, null).getAllEvents(); + assertEquals(2, timelines.size()); + verifyEntityTimeline(timelines.get(0), entityId1, entityType1, ev1); + verifyEntityTimeline(timelines.get(1), entityId1b, entityType1, ev1); + + timelines = store.getEntityTimelines(entityType1, sortedSet, null, + null, null, Collections.singleton("end_event")).getAllEvents(); + assertEquals(2, timelines.size()); + verifyEntityTimeline(timelines.get(0), entityId1, entityType1, ev2); + verifyEntityTimeline(timelines.get(1), entityId1b, entityType1, ev2); + + sortedSet.add(entityId2); + timelines = store.getEntityTimelines(entityType2, sortedSet, null, + null, null, null).getAllEvents(); + assertEquals(1, timelines.size()); + verifyEntityTimeline(timelines.get(0), entityId2, entityType2, ev3, ev4); + } + + /** + * Verify a single entity and its start time + */ + protected static void verifyEntityInfo(String entityId, String entityType, + List<TimelineEvent> events, Map<String, Set<String>> relatedEntities, + Map<String, Set<Object>> primaryFilters, Map<String, Object> otherInfo, + Long startTime, TimelineEntity retrievedEntityInfo) { + + verifyEntityInfo(entityId, entityType, events, relatedEntities, + primaryFilters, otherInfo, retrievedEntityInfo); + assertEquals(startTime, retrievedEntityInfo.getStartTime()); + } + + /** + * Verify a single entity + */ + protected static void verifyEntityInfo(String entityId, String entityType, + List<TimelineEvent> events, Map<String, Set<String>> relatedEntities, + Map<String, Set<Object>> primaryFilters, Map<String, Object> otherInfo, + TimelineEntity retrievedEntityInfo) { + if (entityId == null) { + assertNull(retrievedEntityInfo); + return; + } + assertEquals(entityId, retrievedEntityInfo.getEntityId()); + assertEquals(entityType, retrievedEntityInfo.getEntityType()); + if (events == null) { + assertNull(retrievedEntityInfo.getEvents()); + } else { + assertEquals(events, retrievedEntityInfo.getEvents()); + } + if (relatedEntities == null) { + assertNull(retrievedEntityInfo.getRelatedEntities()); + } else { + assertEquals(relatedEntities, retrievedEntityInfo.getRelatedEntities()); + } + if (primaryFilters == null) { + assertNull(retrievedEntityInfo.getPrimaryFilters()); + } else { + assertTrue(primaryFilters.equals( + retrievedEntityInfo.getPrimaryFilters())); + } + if (otherInfo == null) { + assertNull(retrievedEntityInfo.getOtherInfo()); + } else { + assertTrue(otherInfo.equals(retrievedEntityInfo.getOtherInfo())); + } + } + + /** + * Verify timeline events + */ + private static void verifyEntityTimeline( + EventsOfOneEntity retrievedEvents, String entityId, String entityType, + TimelineEvent... 
actualEvents) { + assertEquals(entityId, retrievedEvents.getEntityId()); + assertEquals(entityType, retrievedEvents.getEntityType()); + assertEquals(actualEvents.length, retrievedEvents.getEvents().size()); + for (int i = 0; i < actualEvents.length; i++) { + assertEquals(actualEvents[i], retrievedEvents.getEvents().get(i)); + } + } + + /** + * Create a test entity + */ + protected static TimelineEntity createEntity(String entityId, String entityType, + Long startTime, List<TimelineEvent> events, + Map<String, Set<String>> relatedEntities, + Map<String, Set<Object>> primaryFilters, + Map<String, Object> otherInfo) { + TimelineEntity entity = new TimelineEntity(); + entity.setEntityId(entityId); + entity.setEntityType(entityType); + entity.setStartTime(startTime); + entity.setEvents(events); + if (relatedEntities != null) { + for (Entry<String, Set<String>> e : relatedEntities.entrySet()) { + for (String v : e.getValue()) { + entity.addRelatedEntity(e.getKey(), v); + } + } + } else { + entity.setRelatedEntities(null); + } + entity.setPrimaryFilters(primaryFilters); + entity.setOtherInfo(otherInfo); + return entity; + } + + /** + * Create a test event + */ + private static TimelineEvent createEvent(long timestamp, String type, Map<String, Object> info) { + TimelineEvent event = new TimelineEvent(); + event.setTimestamp(timestamp); + event.setEventType(type); + event.setEventInfo(info); + return event; + } + +} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/security/TestTimelineACLsManager.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/security/TestTimelineACLsManager.java new file mode 100644 index 0000000..5825e7e --- /dev/null +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/security/TestTimelineACLsManager.java @@ -0,0 +1,92 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.yarn.server.timeline.security; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.security.UserGroupInformation; +import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity; +import org.apache.hadoop.yarn.conf.YarnConfiguration; +import org.apache.hadoop.yarn.exceptions.YarnException; +import org.apache.hadoop.yarn.server.timeline.TimelineStore; +import org.apache.hadoop.yarn.server.timeline.security.TimelineACLsManager; +import org.junit.Assert; +import org.junit.Test; + +public class TestTimelineACLsManager { + + @Test + public void testYarnACLsNotEnabled() throws Exception { + Configuration conf = new YarnConfiguration(); + conf.setBoolean(YarnConfiguration.YARN_ACL_ENABLE, false); + TimelineACLsManager timelineACLsManager = + new TimelineACLsManager(conf); + TimelineEntity entity = new TimelineEntity(); + entity.addPrimaryFilter( + TimelineStore.SystemFilter.ENTITY_OWNER + .toString(), "owner"); + Assert.assertTrue( + "Always true when ACLs are not enabled", + timelineACLsManager.checkAccess( + UserGroupInformation.createRemoteUser("user"), entity)); + } + + @Test + public void testYarnACLsEnabled() throws Exception { + Configuration conf = new YarnConfiguration(); + conf.setBoolean(YarnConfiguration.YARN_ACL_ENABLE, true); + conf.set(YarnConfiguration.YARN_ADMIN_ACL, "admin"); + TimelineACLsManager timelineACLsManager = + new TimelineACLsManager(conf); + TimelineEntity entity = new TimelineEntity(); + entity.addPrimaryFilter( + TimelineStore.SystemFilter.ENTITY_OWNER + .toString(), "owner"); + Assert.assertTrue( + "Owner should be allowed to access", + timelineACLsManager.checkAccess( + UserGroupInformation.createRemoteUser("owner"), entity)); + Assert.assertFalse( + "Other shouldn't be allowed to access", + timelineACLsManager.checkAccess( + UserGroupInformation.createRemoteUser("other"), entity)); + Assert.assertTrue( + "Admin should be allowed to access", + timelineACLsManager.checkAccess( + UserGroupInformation.createRemoteUser("admin"), entity)); + } + + @Test + public void testCorruptedOwnerInfo() throws Exception { + Configuration conf = new YarnConfiguration(); + conf.setBoolean(YarnConfiguration.YARN_ACL_ENABLE, true); + conf.set(YarnConfiguration.YARN_ADMIN_ACL, "owner"); + TimelineACLsManager timelineACLsManager = + new TimelineACLsManager(conf); + TimelineEntity entity = new TimelineEntity(); + try { + timelineACLsManager.checkAccess( + UserGroupInformation.createRemoteUser("owner"), entity); + Assert.fail("Exception is expected"); + } catch (YarnException e) { + Assert.assertTrue("It's not the exact expected exception", e.getMessage() + .contains("is corrupted.")); + } + } + +} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/webapp/TestTimelineWebServices.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/webapp/TestTimelineWebServices.java new file mode 100644 index 0000000..24cc5ca --- /dev/null +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/webapp/TestTimelineWebServices.java @@ -0,0 +1,632 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.yarn.server.timeline.webapp; + +import static org.junit.Assert.assertEquals; + +import java.io.IOException; + +import javax.inject.Singleton; +import javax.servlet.Filter; +import javax.servlet.FilterChain; +import javax.servlet.FilterConfig; +import javax.servlet.ServletException; +import javax.servlet.ServletRequest; +import javax.servlet.ServletResponse; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletRequestWrapper; +import javax.ws.rs.core.MediaType; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities; +import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity; +import org.apache.hadoop.yarn.api.records.timeline.TimelineEvent; +import org.apache.hadoop.yarn.api.records.timeline.TimelineEvents; +import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse; +import org.apache.hadoop.yarn.conf.YarnConfiguration; +import org.apache.hadoop.yarn.security.AdminACLsManager; +import org.apache.hadoop.yarn.server.timeline.TestMemoryTimelineStore; +import org.apache.hadoop.yarn.server.timeline.TimelineStore; +import org.apache.hadoop.yarn.server.timeline.security.TimelineACLsManager; +import org.apache.hadoop.yarn.server.timeline.webapp.TimelineWebServices.AboutInfo; +import org.apache.hadoop.yarn.webapp.GenericExceptionHandler; +import org.apache.hadoop.yarn.webapp.YarnJacksonJaxbJsonProvider; +import org.junit.Assert; +import org.junit.Test; + +import com.google.inject.Guice; +import com.google.inject.Injector; +import com.google.inject.servlet.GuiceServletContextListener; +import com.google.inject.servlet.ServletModule; +import com.sun.jersey.api.client.ClientResponse; +import com.sun.jersey.api.client.WebResource; +import com.sun.jersey.api.client.config.DefaultClientConfig; +import com.sun.jersey.guice.spi.container.servlet.GuiceContainer; +import com.sun.jersey.test.framework.JerseyTest; +import com.sun.jersey.test.framework.WebAppDescriptor; + + +public class TestTimelineWebServices extends JerseyTest { + + private static TimelineStore store; + private static TimelineACLsManager timelineACLsManager; + private static AdminACLsManager adminACLsManager; + private static String remoteUser; + private long beforeTime; + + private Injector injector = Guice.createInjector(new ServletModule() { + + @Override + protected void configureServlets() { + bind(YarnJacksonJaxbJsonProvider.class); + bind(TimelineWebServices.class); + bind(GenericExceptionHandler.class); + try{ + store = mockTimelineStore(); + } catch (Exception e) { + Assert.fail(); + } + bind(TimelineStore.class).toInstance(store); + Configuration conf = new YarnConfiguration(); + conf.setBoolean(YarnConfiguration.YARN_ACL_ENABLE, false); + timelineACLsManager = new TimelineACLsManager(conf); + 
conf.setBoolean(YarnConfiguration.YARN_ACL_ENABLE, true);
+      conf.set(YarnConfiguration.YARN_ADMIN_ACL, "admin");
+      adminACLsManager = new AdminACLsManager(conf);
+      bind(TimelineACLsManager.class).toInstance(timelineACLsManager);
+      serve("/*").with(GuiceContainer.class);
+      filter("/*").through(TestFilter.class);
+    }
+
+  });
+
+  public class GuiceServletConfig extends GuiceServletContextListener {
+
+    @Override
+    protected Injector getInjector() {
+      return injector;
+    }
+  }
+
+  private TimelineStore mockTimelineStore()
+      throws Exception {
+    beforeTime = System.currentTimeMillis() - 1;
+    TestMemoryTimelineStore store =
+        new TestMemoryTimelineStore();
+    store.setup();
+    return store.getTimelineStore();
+  }
+
+  public TestTimelineWebServices() {
+    super(new WebAppDescriptor.Builder(
+        "org.apache.hadoop.yarn.server.applicationhistoryservice.webapp")
+        .contextListenerClass(GuiceServletConfig.class)
+        .filterClass(com.google.inject.servlet.GuiceFilter.class)
+        .contextPath("jersey-guice-filter")
+        .servletPath("/")
+        .clientConfig(new DefaultClientConfig(YarnJacksonJaxbJsonProvider.class))
+        .build());
+  }
+
+  @Test
+  public void testAbout() throws Exception {
+    WebResource r = resource();
+    ClientResponse response = r.path("ws").path("v1").path("timeline")
+        .accept(MediaType.APPLICATION_JSON)
+        .get(ClientResponse.class);
+    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+    TimelineWebServices.AboutInfo about =
+        response.getEntity(TimelineWebServices.AboutInfo.class);
+    Assert.assertNotNull(about);
+    Assert.assertEquals("Timeline API", about.getAbout());
+  }
+
+  private static void verifyEntities(TimelineEntities entities) {
+    Assert.assertNotNull(entities);
+    Assert.assertEquals(2, entities.getEntities().size());
+    TimelineEntity entity1 = entities.getEntities().get(0);
+    Assert.assertNotNull(entity1);
+    Assert.assertEquals("id_1", entity1.getEntityId());
+    Assert.assertEquals("type_1", entity1.getEntityType());
+    Assert.assertEquals(123l, entity1.getStartTime().longValue());
+    Assert.assertEquals(2, entity1.getEvents().size());
+    Assert.assertEquals(4, entity1.getPrimaryFilters().size());
+    Assert.assertEquals(4, entity1.getOtherInfo().size());
+    TimelineEntity entity2 = entities.getEntities().get(1);
+    Assert.assertNotNull(entity2);
+    Assert.assertEquals("id_2", entity2.getEntityId());
+    Assert.assertEquals("type_1", entity2.getEntityType());
+    Assert.assertEquals(123l, entity2.getStartTime().longValue());
+    Assert.assertEquals(2, entity2.getEvents().size());
+    Assert.assertEquals(4, entity2.getPrimaryFilters().size());
+    Assert.assertEquals(4, entity2.getOtherInfo().size());
+  }
+
+  @Test
+  public void testGetEntities() throws Exception {
+    WebResource r = resource();
+    ClientResponse response = r.path("ws").path("v1").path("timeline")
+        .path("type_1")
+        .accept(MediaType.APPLICATION_JSON)
+        .get(ClientResponse.class);
+    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+    verifyEntities(response.getEntity(TimelineEntities.class));
+  }
+
+  @Test
+  public void testFromId() throws Exception {
+    WebResource r = resource();
+    ClientResponse response = r.path("ws").path("v1").path("timeline")
+        .path("type_1").queryParam("fromId", "id_2")
+        .accept(MediaType.APPLICATION_JSON)
+        .get(ClientResponse.class);
+    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+    assertEquals(1, response.getEntity(TimelineEntities.class).getEntities()
+        .size());
+
+    response = r.path("ws").path("v1").path("timeline")
+        .path("type_1").queryParam("fromId", "id_1")
+        .accept(MediaType.APPLICATION_JSON)
+        .get(ClientResponse.class);
+    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+    assertEquals(2, response.getEntity(TimelineEntities.class).getEntities()
+        .size());
+  }
+
+  @Test
+  public void testFromTs() throws Exception {
+    WebResource r = resource();
+    ClientResponse response = r.path("ws").path("v1").path("timeline")
+        .path("type_1").queryParam("fromTs", Long.toString(beforeTime))
+        .accept(MediaType.APPLICATION_JSON)
+        .get(ClientResponse.class);
+    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+    assertEquals(0, response.getEntity(TimelineEntities.class).getEntities()
+        .size());
+
+    response = r.path("ws").path("v1").path("timeline")
+        .path("type_1").queryParam("fromTs", Long.toString(
+            System.currentTimeMillis()))
+        .accept(MediaType.APPLICATION_JSON)
+        .get(ClientResponse.class);
+    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+    assertEquals(2, response.getEntity(TimelineEntities.class).getEntities()
+        .size());
+  }
+
+  @Test
+  public void testPrimaryFilterString() {
+    WebResource r = resource();
+    ClientResponse response = r.path("ws").path("v1").path("timeline")
+        .path("type_1").queryParam("primaryFilter", "user:username")
+        .accept(MediaType.APPLICATION_JSON)
+        .get(ClientResponse.class);
+    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+    verifyEntities(response.getEntity(TimelineEntities.class));
+  }
+
+  @Test
+  public void testPrimaryFilterInteger() {
+    WebResource r = resource();
+    ClientResponse response = r.path("ws").path("v1").path("timeline")
+        .path("type_1").queryParam("primaryFilter",
+            "appname:" + Integer.toString(Integer.MAX_VALUE))
+        .accept(MediaType.APPLICATION_JSON)
+        .get(ClientResponse.class);
+    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+    verifyEntities(response.getEntity(TimelineEntities.class));
+  }
+
+  @Test
+  public void testPrimaryFilterLong() {
+    WebResource r = resource();
+    ClientResponse response = r.path("ws").path("v1").path("timeline")
+        .path("type_1").queryParam("primaryFilter",
+            "long:" + Long.toString((long)Integer.MAX_VALUE + 1l))
+        .accept(MediaType.APPLICATION_JSON)
+        .get(ClientResponse.class);
+    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+    verifyEntities(response.getEntity(TimelineEntities.class));
+  }
+
+  @Test
+  public void testPrimaryFilterNumericString() {
+    // without quotes, 123abc is interpreted as the number 123,
+    // which finds no entities
+    WebResource r = resource();
+    ClientResponse response = r.path("ws").path("v1").path("timeline")
+        .path("type_1").queryParam("primaryFilter", "other:123abc")
+        .accept(MediaType.APPLICATION_JSON)
+        .get(ClientResponse.class);
+    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+    assertEquals(0, response.getEntity(TimelineEntities.class).getEntities()
+        .size());
+  }
+
+  @Test
+  public void testPrimaryFilterNumericStringWithQuotes() {
+    WebResource r = resource();
+    ClientResponse response = r.path("ws").path("v1").path("timeline")
+        .path("type_1").queryParam("primaryFilter", "other:\"123abc\"")
+        .accept(MediaType.APPLICATION_JSON)
+        .get(ClientResponse.class);
+    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+    verifyEntities(response.getEntity(TimelineEntities.class));
+  }
+
+  @Test
+  public void testSecondaryFilters() {
+    WebResource r = resource();
+    ClientResponse response = r.path("ws").path("v1").path("timeline")
+        .path("type_1")
+        .queryParam("secondaryFilter",
+            "user:username,appname:" + Integer.toString(Integer.MAX_VALUE))
+        .accept(MediaType.APPLICATION_JSON)
+        .get(ClientResponse.class);
+    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+    verifyEntities(response.getEntity(TimelineEntities.class));
+  }
+
+  @Test
+  public void testGetEntity() throws Exception {
+    WebResource r = resource();
+    ClientResponse response = r.path("ws").path("v1").path("timeline")
+        .path("type_1").path("id_1")
+        .accept(MediaType.APPLICATION_JSON)
+        .get(ClientResponse.class);
+    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+    TimelineEntity entity = response.getEntity(TimelineEntity.class);
+    Assert.assertNotNull(entity);
+    Assert.assertEquals("id_1", entity.getEntityId());
+    Assert.assertEquals("type_1", entity.getEntityType());
+    Assert.assertEquals(123l, entity.getStartTime().longValue());
+    Assert.assertEquals(2, entity.getEvents().size());
+    Assert.assertEquals(4, entity.getPrimaryFilters().size());
+    Assert.assertEquals(4, entity.getOtherInfo().size());
+  }
+
+  @Test
+  public void testGetEntityFields1() throws Exception {
+    WebResource r = resource();
+    ClientResponse response = r.path("ws").path("v1").path("timeline")
+        .path("type_1").path("id_1").queryParam("fields", "events,otherinfo")
+        .accept(MediaType.APPLICATION_JSON)
+        .get(ClientResponse.class);
+    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+    TimelineEntity entity = response.getEntity(TimelineEntity.class);
+    Assert.assertNotNull(entity);
+    Assert.assertEquals("id_1", entity.getEntityId());
+    Assert.assertEquals("type_1", entity.getEntityType());
+    Assert.assertEquals(123l, entity.getStartTime().longValue());
+    Assert.assertEquals(2, entity.getEvents().size());
+    Assert.assertEquals(0, entity.getPrimaryFilters().size());
+    Assert.assertEquals(4, entity.getOtherInfo().size());
+  }
+
+  @Test
+  public void testGetEntityFields2() throws Exception {
+    WebResource r = resource();
+    ClientResponse response = r.path("ws").path("v1").path("timeline")
+        .path("type_1").path("id_1").queryParam("fields", "lasteventonly," +
+            "primaryfilters,relatedentities")
+        .accept(MediaType.APPLICATION_JSON)
+        .get(ClientResponse.class);
+    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+    TimelineEntity entity = response.getEntity(TimelineEntity.class);
+    Assert.assertNotNull(entity);
+    Assert.assertEquals("id_1", entity.getEntityId());
+    Assert.assertEquals("type_1", entity.getEntityType());
+    Assert.assertEquals(123l, entity.getStartTime().longValue());
+    Assert.assertEquals(1, entity.getEvents().size());
+    Assert.assertEquals(4, entity.getPrimaryFilters().size());
+    Assert.assertEquals(0, entity.getOtherInfo().size());
+  }
+
+  @Test
+  public void testGetEvents() throws Exception {
+    WebResource r = resource();
+    ClientResponse response = r.path("ws").path("v1").path("timeline")
+        .path("type_1").path("events")
+        .queryParam("entityId", "id_1")
+        .accept(MediaType.APPLICATION_JSON)
+        .get(ClientResponse.class);
+    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+    TimelineEvents events = response.getEntity(TimelineEvents.class);
+    Assert.assertNotNull(events);
+    Assert.assertEquals(1, events.getAllEvents().size());
+    TimelineEvents.EventsOfOneEntity partEvents = events.getAllEvents().get(0);
+    Assert.assertEquals(2, partEvents.getEvents().size());
+    TimelineEvent event1 = partEvents.getEvents().get(0);
+    Assert.assertEquals(456l, event1.getTimestamp());
+    Assert.assertEquals("end_event", event1.getEventType());
+    Assert.assertEquals(1, event1.getEventInfo().size());
+    TimelineEvent event2 = partEvents.getEvents().get(1);
+    Assert.assertEquals(123l, event2.getTimestamp());
+    Assert.assertEquals("start_event", event2.getEventType());
+    Assert.assertEquals(0, event2.getEventInfo().size());
+  }
+
+  @Test
+  public void testPostEntities() throws Exception {
+    TimelineEntities entities = new TimelineEntities();
+    TimelineEntity entity = new TimelineEntity();
+    entity.setEntityId("test id 1");
+    entity.setEntityType("test type 1");
+    entity.setStartTime(System.currentTimeMillis());
+    entities.addEntity(entity);
+    WebResource r = resource();
+    ClientResponse response = r.path("ws").path("v1").path("timeline")
+        .accept(MediaType.APPLICATION_JSON)
+        .type(MediaType.APPLICATION_JSON)
+        .post(ClientResponse.class, entities);
+    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+    TimelinePutResponse putResponse = response.getEntity(TimelinePutResponse.class);
+    Assert.assertNotNull(putResponse);
+    Assert.assertEquals(0, putResponse.getErrors().size());
+    // verify the entity exists in the store
+    response = r.path("ws").path("v1").path("timeline")
+        .path("test type 1").path("test id 1")
+        .accept(MediaType.APPLICATION_JSON)
+        .get(ClientResponse.class);
+    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+    entity = response.getEntity(TimelineEntity.class);
+    Assert.assertNotNull(entity);
+    Assert.assertEquals("test id 1", entity.getEntityId());
+    Assert.assertEquals("test type 1", entity.getEntityType());
+  }
+
+  @Test
+  public void testPostEntitiesWithYarnACLsEnabled() throws Exception {
+    AdminACLsManager oldAdminACLsManager =
+        timelineACLsManager.setAdminACLsManager(adminACLsManager);
+    remoteUser = "tester";
+    try {
+      TimelineEntities entities = new TimelineEntities();
+      TimelineEntity entity = new TimelineEntity();
+      entity.setEntityId("test id 2");
+      entity.setEntityType("test type 2");
+      entity.setStartTime(System.currentTimeMillis());
+      entities.addEntity(entity);
+      WebResource r = resource();
+      ClientResponse response = r.path("ws").path("v1").path("timeline")
+          .accept(MediaType.APPLICATION_JSON)
+          .type(MediaType.APPLICATION_JSON)
+          .post(ClientResponse.class, entities);
+      assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+      TimelinePutResponse putResponse = response.getEntity(TimelinePutResponse.class);
+      Assert.assertNotNull(putResponse);
+      Assert.assertEquals(0, putResponse.getErrors().size());
+
+      // override/append timeline data in the same entity with different user
+      remoteUser = "other";
+      response = r.path("ws").path("v1").path("timeline")
+          .accept(MediaType.APPLICATION_JSON)
+          .type(MediaType.APPLICATION_JSON)
+          .post(ClientResponse.class, entities);
+      assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+      putResponse = response.getEntity(TimelinePutResponse.class);
+      Assert.assertNotNull(putResponse);
+      Assert.assertEquals(1, putResponse.getErrors().size());
+      Assert.assertEquals(TimelinePutResponse.TimelinePutError.ACCESS_DENIED,
+          putResponse.getErrors().get(0).getErrorCode());
+    } finally {
+      timelineACLsManager.setAdminACLsManager(oldAdminACLsManager);
+      remoteUser = null;
+    }
+  }
+
+  @Test
+  public void testGetEntityWithYarnACLsEnabled() throws Exception {
+    AdminACLsManager oldAdminACLsManager =
+        timelineACLsManager.setAdminACLsManager(adminACLsManager);
+    remoteUser = "tester";
+    try {
+      TimelineEntities entities = new TimelineEntities();
+      TimelineEntity entity = new TimelineEntity();
+      entity.setEntityId("test id 3");
+      entity.setEntityType("test type 3");
+      entity.setStartTime(System.currentTimeMillis());
+      entities.addEntity(entity);
+      WebResource r = resource();
+      ClientResponse response = r.path("ws").path("v1").path("timeline")
+          .accept(MediaType.APPLICATION_JSON)
+          .type(MediaType.APPLICATION_JSON)
+          .post(ClientResponse.class, entities);
+      // verify the system data will not be exposed
+      // 1. No field specification
+      response = r.path("ws").path("v1").path("timeline")
+          .path("test type 3").path("test id 3")
+          .accept(MediaType.APPLICATION_JSON)
+          .get(ClientResponse.class);
+      assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+      entity = response.getEntity(TimelineEntity.class);
+      Assert.assertNull(entity.getPrimaryFilters().get(
+          TimelineStore.SystemFilter.ENTITY_OWNER.toString()));
+      // 2. other field
+      response = r.path("ws").path("v1").path("timeline")
+          .path("test type 3").path("test id 3")
+          .queryParam("fields", "relatedentities")
+          .accept(MediaType.APPLICATION_JSON)
+          .get(ClientResponse.class);
+      assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+      entity = response.getEntity(TimelineEntity.class);
+      Assert.assertNull(entity.getPrimaryFilters().get(
+          TimelineStore.SystemFilter.ENTITY_OWNER.toString()));
+      // 3. primaryfilters field
+      response = r.path("ws").path("v1").path("timeline")
+          .path("test type 3").path("test id 3")
+          .queryParam("fields", "primaryfilters")
+          .accept(MediaType.APPLICATION_JSON)
+          .get(ClientResponse.class);
+      assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+      entity = response.getEntity(TimelineEntity.class);
+      Assert.assertNull(entity.getPrimaryFilters().get(
+          TimelineStore.SystemFilter.ENTITY_OWNER.toString()));
+
+      // get entity with other user
+      remoteUser = "other";
+      response = r.path("ws").path("v1").path("timeline")
+          .path("test type 3").path("test id 3")
+          .accept(MediaType.APPLICATION_JSON)
+          .get(ClientResponse.class);
+      assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+      assertEquals(ClientResponse.Status.NOT_FOUND,
+          response.getClientResponseStatus());
+    } finally {
+      timelineACLsManager.setAdminACLsManager(oldAdminACLsManager);
+      remoteUser = null;
+    }
+  }
+
+  @Test
+  public void testGetEntitiesWithYarnACLsEnabled() {
+    AdminACLsManager oldAdminACLsManager =
+        timelineACLsManager.setAdminACLsManager(adminACLsManager);
+    remoteUser = "tester";
+    try {
+      TimelineEntities entities = new TimelineEntities();
+      TimelineEntity entity = new TimelineEntity();
+      entity.setEntityId("test id 4");
+      entity.setEntityType("test type 4");
+      entity.setStartTime(System.currentTimeMillis());
+      entities.addEntity(entity);
+      WebResource r = resource();
+      ClientResponse response = r.path("ws").path("v1").path("timeline")
+          .accept(MediaType.APPLICATION_JSON)
+          .type(MediaType.APPLICATION_JSON)
+          .post(ClientResponse.class, entities);
+
+      remoteUser = "other";
+      entities = new TimelineEntities();
+      entity = new TimelineEntity();
+      entity.setEntityId("test id 5");
+      entity.setEntityType("test type 4");
+      entity.setStartTime(System.currentTimeMillis());
+      entities.addEntity(entity);
+      r = resource();
+      response = r.path("ws").path("v1").path("timeline")
+          .accept(MediaType.APPLICATION_JSON)
+          .type(MediaType.APPLICATION_JSON)
+          .post(ClientResponse.class, entities);
+
+      response = r.path("ws").path("v1").path("timeline")
+          .path("test type 4")
+          .accept(MediaType.APPLICATION_JSON)
+          .get(ClientResponse.class);
+      assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+      entities = response.getEntity(TimelineEntities.class);
+      assertEquals(1, entities.getEntities().size());
+      assertEquals("test type 4", entities.getEntities().get(0).getEntityType());
+      assertEquals("test id 5", entities.getEntities().get(0).getEntityId());
+    } finally {
+      timelineACLsManager.setAdminACLsManager(oldAdminACLsManager);
+      remoteUser = null;
+    }
+  }
+
+  @Test
+  public void testGetEventsWithYarnACLsEnabled() {
+    AdminACLsManager oldAdminACLsManager =
+        timelineACLsManager.setAdminACLsManager(adminACLsManager);
+    remoteUser = "tester";
+    try {
+      TimelineEntities entities = new TimelineEntities();
+      TimelineEntity entity = new TimelineEntity();
+      entity.setEntityId("test id 5");
+      entity.setEntityType("test type 5");
+      entity.setStartTime(System.currentTimeMillis());
+      TimelineEvent event = new TimelineEvent();
+      event.setEventType("event type 1");
+      event.setTimestamp(System.currentTimeMillis());
+      entity.addEvent(event);
+      entities.addEntity(entity);
+      WebResource r = resource();
+      ClientResponse response = r.path("ws").path("v1").path("timeline")
+          .accept(MediaType.APPLICATION_JSON)
+          .type(MediaType.APPLICATION_JSON)
+          .post(ClientResponse.class, entities);
+
+      remoteUser = "other";
+      entities = new TimelineEntities();
+      entity = new TimelineEntity();
+      entity.setEntityId("test id 6");
+      entity.setEntityType("test type 5");
+      entity.setStartTime(System.currentTimeMillis());
+      event = new TimelineEvent();
+      event.setEventType("event type 2");
+      event.setTimestamp(System.currentTimeMillis());
+      entity.addEvent(event);
+      entities.addEntity(entity);
+      r = resource();
+      response = r.path("ws").path("v1").path("timeline")
+          .accept(MediaType.APPLICATION_JSON)
+          .type(MediaType.APPLICATION_JSON)
+          .post(ClientResponse.class, entities);
+
+      response = r.path("ws").path("v1").path("timeline")
+          .path("test type 5").path("events")
+          .queryParam("entityId", "test id 5,test id 6")
+          .accept(MediaType.APPLICATION_JSON)
+          .get(ClientResponse.class);
+      assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+      TimelineEvents events = response.getEntity(TimelineEvents.class);
+      assertEquals(1, events.getAllEvents().size());
+      assertEquals("test id 6", events.getAllEvents().get(0).getEntityId());
+    } finally {
+      timelineACLsManager.setAdminACLsManager(oldAdminACLsManager);
+      remoteUser = null;
+    }
+  }
+
+  @Singleton
+  private static class TestFilter implements Filter {
+
+    @Override
+    public void init(FilterConfig filterConfig) throws ServletException {
+    }
+
+    @Override
+    public void doFilter(ServletRequest request, ServletResponse response,
+        FilterChain chain) throws IOException, ServletException {
+      if (request instanceof HttpServletRequest) {
+        request =
+            new TestHttpServletRequestWrapper((HttpServletRequest) request);
+      }
+      chain.doFilter(request, response);
+    }
+
+    @Override
+    public void destroy() {
+    }
+
+  }
+
+  private static class TestHttpServletRequestWrapper extends HttpServletRequestWrapper {
+
+    public TestHttpServletRequestWrapper(HttpServletRequest request) {
+      super(request);
+    }
+
+    @Override
+    public String getRemoteUser() {
+      return TestTimelineWebServices.remoteUser;
+    }
+
+  }
+}
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/MiniYARNCluster.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/MiniYARNCluster.java
index 6e0ed02..2f35188 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/MiniYARNCluster.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/MiniYARNCluster.java
@@ -25,6 +25,7 @@
 import java.util.Collection;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ConcurrentMap;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -56,8 +57,6 @@
 import org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryServer;
 import org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryStore;
 import org.apache.hadoop.yarn.server.applicationhistoryservice.MemoryApplicationHistoryStore;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.MemoryTimelineStore;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TimelineStore;
 import org.apache.hadoop.yarn.server.applicationhistoryservice.webapp.AHSWebApp;
 import org.apache.hadoop.yarn.server.nodemanager.Context;
 import org.apache.hadoop.yarn.server.nodemanager.NodeHealthCheckerService;
@@ -70,6 +69,8 @@
 import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptEventType;
 import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.event.RMAppAttemptRegistrationEvent;
 import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.event.RMAppAttemptUnregistrationEvent;
+import org.apache.hadoop.yarn.server.timeline.MemoryTimelineStore;
+import org.apache.hadoop.yarn.server.timeline.TimelineStore;
 import org.apache.hadoop.yarn.webapp.util.WebAppUtils;

 import com.google.common.annotations.VisibleForTesting;
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/apt/TimelineServer.apt.vm hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/apt/TimelineServer.apt.vm
index 3c1169b..58f3c62 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/apt/TimelineServer.apt.vm
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/apt/TimelineServer.apt.vm
@@ -152,7 +152,7 @@ YARN Timeline Server
   <description>Store class name for timeline store.</description>
   <name>yarn.timeline-service.store-class</name>
-  <value>org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.LeveldbTimelineStore</value>
+  <value>org.apache.hadoop.yarn.server.timeline.LeveldbTimelineStore</value>
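
For operators following this rename, the only user-visible change is the value of yarn.timeline-service.store-class. A minimal yarn-site.xml override using the new class name might look like the sketch below; the property name and class value are taken from the patch above, while the surrounding configuration element is ordinary yarn-site.xml boilerplate rather than part of this commit:

<configuration>
  <!-- Point the Timeline Server at the renamed LevelDB-backed store. -->
  <property>
    <name>yarn.timeline-service.store-class</name>
    <value>org.apache.hadoop.yarn.server.timeline.LeveldbTimelineStore</value>
  </property>
</configuration>

Deployments that had explicitly set the old org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.LeveldbTimelineStore value would need to switch to the new package name; installations relying on the default pick up the renamed class from yarn-default.xml automatically.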