diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/pom.xml hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/pom.xml
index 9748374..e755910 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/pom.xml
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/pom.xml
@@ -185,6 +185,13 @@
       <artifactId>fst</artifactId>
       <version>2.24</version>
+
+    <dependency>
+      <groupId>com.fasterxml.jackson.core</groupId>
+      <artifactId>jackson-databind</artifactId>
+      <version>2.2.3</version>
+    </dependency>
+
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/LevelDBCacheTimelineStore.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/LevelDBCacheTimelineStore.java
new file mode 100644
index 0000000..fafd5d3
--- /dev/null
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/LevelDBCacheTimelineStore.java
@@ -0,0 +1,309 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.server.timeline;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience.Private;
+import org.apache.hadoop.classification.InterfaceStability.Unstable;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import org.apache.hadoop.yarn.server.timeline.util.LeveldbUtils;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.fusesource.leveldbjni.JniDBFactory;
+import org.iq80.leveldb.DB;
+import org.iq80.leveldb.DBIterator;
+import org.iq80.leveldb.Options;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.Iterator;
+import java.util.Map;
+
+/**
+ * LevelDB implementation of {@link MapTimelineStore}. This implementation
+ * stores the entity hash map into a LevelDB instance. There are two partitions
+ * of the key space. One partition stores an entity id to start time mapping:
+ *
+ * i!ENTITY_ID!ENTITY_TYPE -> ENTITY_START_TIME
+ *
+ * The other partition is to store the actual data:
+ *
+ * e!START_TIME!ENTITY_ID!ENTITY_TYPE -> ENTITY_BYTES
+ *
+ * This storage does not have any garbage collection mechanism, and is designed
+ * mainly for caching use cases.
+ */
+@Private
+@Unstable
+public class LevelDBCacheTimelineStore extends MapTimelineStore {
+ private static final Log LOG
+ = LogFactory.getLog(LevelDBCacheTimelineStore.class);
+ private static final String CACHED_LDB_FILE_PREFIX = "-timeline-cache.ldb";
+ private String dbId;
+ private DB entityDb;
+ private Configuration configuration;
+
+ public LevelDBCacheTimelineStore(String id) {
+ super(LevelDBCacheTimelineStore.class.getName());
+ dbId = id;
+ entityInsertTimes = new MemoryTimelineStore.HashMapStoreAdapter<>();
+ domainById = new MemoryTimelineStore.HashMapStoreAdapter<>();
+ domainsByOwner = new MemoryTimelineStore.HashMapStoreAdapter<>();
+ }
+
+ @Override
+ protected void serviceInit(Configuration conf) throws Exception {
+ configuration = conf;
+ Options options = new Options();
+ options.createIfMissing(true);
+ options.cacheSize(conf.getLong(
+ YarnConfiguration.TIMELINE_SERVICE_LEVELDB_READ_CACHE_SIZE,
+ YarnConfiguration.DEFAULT_TIMELINE_SERVICE_LEVELDB_READ_CACHE_SIZE));
+ JniDBFactory factory = new JniDBFactory();
+ Path dbPath = new Path(
+ conf.get(YarnConfiguration.TIMELINE_SERVICE_LEVELDB_PATH),
+ dbId + CACHED_LDB_FILE_PREFIX);
+ FileSystem localFS = null;
+
+ try {
+ localFS = FileSystem.getLocal(conf);
+ if (!localFS.exists(dbPath)) {
+ if (!localFS.mkdirs(dbPath)) {
+ throw new IOException("Couldn't create directory for leveldb " +
+ "timeline store " + dbPath);
+ }
+ localFS.setPermission(dbPath, LeveldbUtils.LEVELDB_DIR_UMASK);
+ }
+ } finally {
+ IOUtils.cleanup(LOG, localFS);
+ }
+ LOG.info("Using leveldb path " + dbPath);
+ entityDb = factory.open(new File(dbPath.toString()), options);
+ entities = new LevelDBMapAdapter<>(entityDb);
+
+ super.serviceInit(conf);
+ }
+
+ @Override
+ protected void serviceStop() throws Exception {
+ IOUtils.cleanup(LOG, entityDb);
+ Path dbPath = new Path(
+ configuration.get(YarnConfiguration.TIMELINE_SERVICE_LEVELDB_PATH),
+ dbId + CACHED_LDB_FILE_PREFIX);
+ FileSystem localFS = null;
+ try {
+ localFS = FileSystem.getLocal(configuration);
+ if (!localFS.delete(dbPath, true)) {
+ throw new IOException("Couldn't delete data file for leveldb " +
+ "timeline store " + dbPath);
+ }
+ } finally {
+ IOUtils.cleanup(LOG, localFS);
+ }
+ super.serviceStop();
+ }
+
+ /**
+ * A specialized hash map storage that uses LevelDB for storing entity id to
+ * entity mappings.
+ *
+ * @param <K> an {@link EntityIdentifier} typed hash key
+ * @param <V> a {@link TimelineEntity} typed value
+ */
+ class LevelDBMapAdapter<K extends EntityIdentifier, V extends TimelineEntity>
+ implements TimelineStoreMapAdapter<K, V> {
+ private static final String TIME_INDEX_PREFIX = "i";
+ private static final String ENTITY_STORAGE_PREFIX = "e";
+ DB entityDb;
+
+ public LevelDBMapAdapter(DB currLevelDb) {
+ entityDb = currLevelDb;
+ }
+
+ @Override
+ public V get(K entityId) {
+ V result = null;
+ // Read the start time from the index
+ byte[] startTimeBytes = entityDb.get(getStartTimeKey(entityId));
+ if (startTimeBytes == null) {
+ return null;
+ }
+
+ // Build the key for the entity storage and read it
+ try {
+ result = getEntityForKey(getEntityKey(entityId, startTimeBytes));
+ } catch (IOException e) {
+ LOG.error("GenericObjectMapper cannot read key from key "
+ + entityId.toString()
+ + " into an object. Read aborted! ");
+ LOG.error(e.getMessage());
+ }
+
+ return result;
+ }
+
+ @Override
+ public void put(K entityId, V entity) {
+ Long startTime = entity.getStartTime();
+ if (startTime == null) {
+ startTime = System.currentTimeMillis();
+ }
+ // Build the key for the entity storage and read it
+ byte[] startTimeBytes = GenericObjectMapper.writeReverseOrderedLong(
+ startTime);
+ try {
+ byte[] valueBytes = GenericObjectMapper.write(entity);
+ entityDb.put(getEntityKey(entityId, startTimeBytes), valueBytes);
+ } catch (IOException e) {
+ LOG.error("GenericObjectMapper cannot write "
+ + entity.getClass().getName()
+ + " into a byte array. Write aborted! ");
+ LOG.error(e.getMessage());
+ }
+
+ // Build the key for the start time index
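+ // so that get() can later recover the start time, and with it the full
+ // entity key, from the entity id alone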
+ entityDb.put(getStartTimeKey(entityId), startTimeBytes);
+ }
+
+ @Override
+ public void remove(K entityId) {
+ // Read the start time from the index (key starts with an "i") then delete
+ // the record
+ LeveldbUtils.KeyBuilder startTimeKeyBuilder
+ = LeveldbUtils.KeyBuilder.newInstance();
+ startTimeKeyBuilder.add(TIME_INDEX_PREFIX).add(entityId.getId())
+ .add(entityId.getType());
+ byte[] startTimeBytes = entityDb.get(startTimeKeyBuilder.getBytes());
+ if (startTimeBytes == null) {
+ return;
+ }
+ entityDb.delete(startTimeKeyBuilder.getBytes());
+
+ // Build the key for the entity storage and delete it
+ entityDb.delete(getEntityKey(entityId, startTimeBytes));
+ }
+
+ @Override
+ public Iterator<V> valueSetIterator() {
+ return getIterator(null, Long.MAX_VALUE);
+ }
+
+ @Override
+ public Iterator<V> valueSetIterator(V minV) {
+ return getIterator(
+ new EntityIdentifier(minV.getEntityId(), minV.getEntityType()),
+ minV.getStartTime());
+ }
+
+ private Iterator<V> getIterator(
+ EntityIdentifier startId, long startTimeMax) {
+
+ final DBIterator internalDbIterator = entityDb.iterator();
+
+ // we need to iterate from the first element whose key is greater than or
+ // equal to ENTITY_STORAGE_PREFIX!maxTS(!startId), but stop at the first
+ // key that does not have the prefix ENTITY_STORAGE_PREFIX
+
+ // decide end prefix
+ LeveldbUtils.KeyBuilder entityPrefixKeyBuilder
+ = LeveldbUtils.KeyBuilder.newInstance();
+ entityPrefixKeyBuilder.add(ENTITY_STORAGE_PREFIX);
+ final byte[] prefixBytes = entityPrefixKeyBuilder.getBytesForLookup();
+ // decide start prefix on top of end prefix and seek
+ final byte[] startTimeBytes
+ = GenericObjectMapper.writeReverseOrderedLong(startTimeMax);
+ entityPrefixKeyBuilder.add(startTimeBytes, true);
+ if (startId != null) {
+ entityPrefixKeyBuilder.add(startId.getId());
+ }
+ final byte[] startPrefixBytes
+ = entityPrefixKeyBuilder.getBytesForLookup();
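+ // start times are encoded with writeReverseOrderedLong, so larger
+ // timestamps sort first; seeking at startTimeMax therefore positions the
+ // iterator at entities whose start time is <= startTimeMax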
+ internalDbIterator.seek(startPrefixBytes);
+
+ return new Iterator<V>() {
+ @Override
+ public boolean hasNext() {
+ if (!internalDbIterator.hasNext()) {
+ return false;
+ }
+ Map.Entry<byte[], byte[]> nextEntry = internalDbIterator.peekNext();
+ if (LeveldbUtils.prefixMatches(
+ prefixBytes, prefixBytes.length, nextEntry.getKey())) {
+ return true;
+ }
+ return false;
+ }
+
+ @Override
+ public V next() {
+ if (hasNext()) {
+ Map.Entry<byte[], byte[]> nextRaw = internalDbIterator.next();
+ try {
+ V result = getEntityForKey(nextRaw.getKey());
+ return result;
+ } catch (IOException e) {
+ LOG.error("GenericObjectMapper cannot read key from key "
+ + nextRaw.getKey()
+ + " into an object. Read aborted! ");
+ LOG.error(e.getMessage());
+ }
+ }
+ return null;
+ }
+
+ // We do not support remove operations within one iteration
+ @Override
+ public void remove() {
+ LOG.error("LevelDB map adapter does not support iterate-and-remove"
+ + " use cases. ");
+ }
+ };
+ }
+
+ private V getEntityForKey(byte[] key) throws IOException {
+ byte[] resultRaw = entityDb.get(key);
+ if (resultRaw == null) {
+ return null;
+ }
+ ObjectMapper entityMapper = new ObjectMapper();
+ return (V) entityMapper.readValue(resultRaw, TimelineEntity.class);
+ }
+
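+ // The two helpers below build the keys described in the class javadoc,
+ // e.g. (illustrative values) an entity with id "app_1" and type "APP" is
+ // indexed under i!app_1!APP and stored under
+ // e!<reverse-ordered start time>!app_1!APP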
+ private byte[] getStartTimeKey(K entityId) {
+ LeveldbUtils.KeyBuilder startTimeKeyBuilder
+ = LeveldbUtils.KeyBuilder.newInstance();
+ startTimeKeyBuilder.add(TIME_INDEX_PREFIX).add(entityId.getId())
+ .add(entityId.getType());
+ return startTimeKeyBuilder.getBytes();
+ }
+
+ private byte[] getEntityKey(K entityId, byte[] startTimeBytes) {
+ LeveldbUtils.KeyBuilder entityKeyBuilder
+ = LeveldbUtils.KeyBuilder.newInstance();
+ entityKeyBuilder.add(ENTITY_STORAGE_PREFIX).add(startTimeBytes, true)
+ .add(entityId.getId()).add(entityId.getType());
+ return entityKeyBuilder.getBytes();
+ }
+ }
+}
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/LeveldbTimelineStore.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/LeveldbTimelineStore.java
index 8cfa0c7..368ff05 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/LeveldbTimelineStore.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/LeveldbTimelineStore.java
@@ -29,7 +29,6 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.WritableComparator;
import org.apache.hadoop.service.AbstractService;
@@ -41,6 +40,7 @@
import org.apache.hadoop.yarn.server.records.Version;
import org.apache.hadoop.yarn.server.records.impl.pb.VersionPBImpl;
import org.apache.hadoop.yarn.server.timeline.TimelineDataManager.CheckAcl;
+import org.apache.hadoop.yarn.server.timeline.util.LeveldbUtils;
import org.apache.hadoop.yarn.server.timeline.util.LeveldbUtils.KeyBuilder;
import org.apache.hadoop.yarn.server.timeline.util.LeveldbUtils.KeyParser;
import org.apache.hadoop.yarn.server.utils.LeveldbIterator;
@@ -150,11 +150,6 @@
private static final Version CURRENT_VERSION_INFO = Version
.newInstance(1, 0);
- @Private
- @VisibleForTesting
- static final FsPermission LEVELDB_DIR_UMASK = FsPermission
- .createImmutable((short) 0700);
-
 private Map<EntityIdentifier, Long> startTimeWriteCache;
 private Map<EntityIdentifier, Long> startTimeReadCache;
@@ -220,7 +215,7 @@ protected void serviceInit(Configuration conf) throws Exception {
throw new IOException("Couldn't create directory for leveldb " +
"timeline store " + dbPath);
}
- localFS.setPermission(dbPath, LEVELDB_DIR_UMASK);
+ localFS.setPermission(dbPath, LeveldbUtils.LEVELDB_DIR_UMASK);
}
} finally {
IOUtils.cleanup(LOG, localFS);
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/MapTimelineStore.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/MapTimelineStore.java
new file mode 100644
index 0000000..8d22da1
--- /dev/null
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/MapTimelineStore.java
@@ -0,0 +1,516 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.server.timeline;
+
+import org.apache.hadoop.classification.InterfaceAudience.Private;
+import org.apache.hadoop.classification.InterfaceStability.Unstable;
+import org.apache.hadoop.service.AbstractService;
+import org.apache.hadoop.yarn.api.records.timeline.TimelineDomain;
+import org.apache.hadoop.yarn.api.records.timeline.TimelineDomains;
+import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities;
+import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
+import org.apache.hadoop.yarn.api.records.timeline.TimelineEvent;
+import org.apache.hadoop.yarn.api.records.timeline.TimelineEvents;
+import org.apache.hadoop.yarn.api.records.timeline.TimelineEvents.EventsOfOneEntity;
+import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse;
+import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse.TimelinePutError;
+import org.apache.hadoop.yarn.server.timeline.TimelineDataManager.CheckAcl;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+import java.util.SortedSet;
+
+import static org.apache.hadoop.yarn.server.timeline.TimelineDataManager.DEFAULT_DOMAIN_ID;
+
+/**
+ * Map based implementation of {@link TimelineStore}. A concrete map storage
+ * is connected to this implementation through a
+ * {@link TimelineStoreMapAdapter}.
+ *
+ * The methods are synchronized to avoid concurrent modifications.
+ *
+ */
+@Private
+@Unstable
+abstract class MapTimelineStore
+ extends AbstractService implements TimelineStore {
+
+ protected TimelineStoreMapAdapter<EntityIdentifier, TimelineEntity> entities;
+ protected TimelineStoreMapAdapter<EntityIdentifier, Long> entityInsertTimes;
+ protected TimelineStoreMapAdapter<String, TimelineDomain> domainById;
+ protected TimelineStoreMapAdapter<String, Set<TimelineDomain>> domainsByOwner;
+
+
+ public MapTimelineStore() {
+ super(MapTimelineStore.class.getName());
+ }
+
+ public MapTimelineStore(String name) {
+ super(name);
+ }
+
+ @Override
+ public synchronized TimelineEntities getEntities(String entityType, Long limit,
+ Long windowStart, Long windowEnd, String fromId, Long fromTs,
+ NameValuePair primaryFilter, Collection<NameValuePair> secondaryFilters,
+ EnumSet<Field> fields, CheckAcl checkAcl) throws IOException {
+ if (limit == null) {
+ limit = DEFAULT_LIMIT;
+ }
+ if (windowStart == null) {
+ windowStart = Long.MIN_VALUE;
+ }
+ if (windowEnd == null) {
+ windowEnd = Long.MAX_VALUE;
+ }
+ if (fields == null) {
+ fields = EnumSet.allOf(Field.class);
+ }
+
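+ // When paginating, resume from the entity identified by fromId; if that
+ // entity no longer exists, return an empty result set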
+ Iterator<TimelineEntity> entityIterator = null;
+ if (fromId != null) {
+ TimelineEntity firstEntity = entities.get(new EntityIdentifier(fromId,
+ entityType));
+ if (firstEntity == null) {
+ return new TimelineEntities();
+ } else {
+ entityIterator = entities.valueSetIterator(firstEntity);
+ }
+ }
+ if (entityIterator == null) {
+ entityIterator = entities.valueSetIterator();
+ }
+
+ List<TimelineEntity> entitiesSelected = new ArrayList<TimelineEntity>();
+ while (entityIterator.hasNext()) {
+ TimelineEntity entity = entityIterator.next();
+ if (entitiesSelected.size() >= limit) {
+ break;
+ }
+ if (!entity.getEntityType().equals(entityType)) {
+ continue;
+ }
+ if (entity.getStartTime() <= windowStart) {
+ continue;
+ }
+ if (entity.getStartTime() > windowEnd) {
+ continue;
+ }
+ if (fromTs != null && entityInsertTimes.get(new EntityIdentifier(
+ entity.getEntityId(), entity.getEntityType())) > fromTs) {
+ continue;
+ }
+ if (primaryFilter != null &&
+ !matchPrimaryFilter(entity.getPrimaryFilters(), primaryFilter)) {
+ continue;
+ }
+ if (secondaryFilters != null) { // AND logic
+ boolean flag = true;
+ for (NameValuePair secondaryFilter : secondaryFilters) {
+ if (secondaryFilter != null && !matchPrimaryFilter(
+ entity.getPrimaryFilters(), secondaryFilter) &&
+ !matchFilter(entity.getOtherInfo(), secondaryFilter)) {
+ flag = false;
+ break;
+ }
+ }
+ if (!flag) {
+ continue;
+ }
+ }
+ if (entity.getDomainId() == null) {
+ entity.setDomainId(DEFAULT_DOMAIN_ID);
+ }
+ if (checkAcl == null || checkAcl.check(entity)) {
+ entitiesSelected.add(entity);
+ }
+ }
+ List<TimelineEntity> entitiesToReturn = new ArrayList<TimelineEntity>();
+ for (TimelineEntity entitySelected : entitiesSelected) {
+ entitiesToReturn.add(maskFields(entitySelected, fields));
+ }
+ Collections.sort(entitiesToReturn);
+ TimelineEntities entitiesWrapper = new TimelineEntities();
+ entitiesWrapper.setEntities(entitiesToReturn);
+ return entitiesWrapper;
+ }
+
+ @Override
+ public synchronized TimelineEntity getEntity(String entityId, String entityType,
+ EnumSet<Field> fieldsToRetrieve) {
+ if (fieldsToRetrieve == null) {
+ fieldsToRetrieve = EnumSet.allOf(Field.class);
+ }
+ TimelineEntity entity = entities.get(new EntityIdentifier(entityId, entityType));
+ if (entity == null) {
+ return null;
+ } else {
+ return maskFields(entity, fieldsToRetrieve);
+ }
+ }
+
+ @Override
+ public synchronized TimelineEvents getEntityTimelines(String entityType,
+ SortedSet<String> entityIds, Long limit, Long windowStart,
+ Long windowEnd,
+ Set<String> eventTypes) {
+ TimelineEvents allEvents = new TimelineEvents();
+ if (entityIds == null) {
+ return allEvents;
+ }
+ if (limit == null) {
+ limit = DEFAULT_LIMIT;
+ }
+ if (windowStart == null) {
+ windowStart = Long.MIN_VALUE;
+ }
+ if (windowEnd == null) {
+ windowEnd = Long.MAX_VALUE;
+ }
+ for (String entityId : entityIds) {
+ EntityIdentifier entityID = new EntityIdentifier(entityId, entityType);
+ TimelineEntity entity = entities.get(entityID);
+ if (entity == null) {
+ continue;
+ }
+ EventsOfOneEntity events = new EventsOfOneEntity();
+ events.setEntityId(entityId);
+ events.setEntityType(entityType);
+ for (TimelineEvent event : entity.getEvents()) {
+ if (events.getEvents().size() >= limit) {
+ break;
+ }
+ if (event.getTimestamp() <= windowStart) {
+ continue;
+ }
+ if (event.getTimestamp() > windowEnd) {
+ continue;
+ }
+ if (eventTypes != null && !eventTypes.contains(event.getEventType())) {
+ continue;
+ }
+ events.addEvent(event);
+ }
+ allEvents.addEvent(events);
+ }
+ return allEvents;
+ }
+
+ @Override
+ public TimelineDomain getDomain(String domainId)
+ throws IOException {
+ TimelineDomain domain = domainById.get(domainId);
+ if (domain == null) {
+ return null;
+ } else {
+ return createTimelineDomain(
+ domain.getId(),
+ domain.getDescription(),
+ domain.getOwner(),
+ domain.getReaders(),
+ domain.getWriters(),
+ domain.getCreatedTime(),
+ domain.getModifiedTime());
+ }
+ }
+
+ @Override
+ public TimelineDomains getDomains(String owner)
+ throws IOException {
+ List<TimelineDomain> domains = new ArrayList<TimelineDomain>();
+ Set<TimelineDomain> domainsOfOneOwner = domainsByOwner.get(owner);
+ if (domainsOfOneOwner == null) {
+ return new TimelineDomains();
+ }
+ for (TimelineDomain domain : domainsByOwner.get(owner)) {
+ TimelineDomain domainToReturn = createTimelineDomain(
+ domain.getId(),
+ domain.getDescription(),
+ domain.getOwner(),
+ domain.getReaders(),
+ domain.getWriters(),
+ domain.getCreatedTime(),
+ domain.getModifiedTime());
+ domains.add(domainToReturn);
+ }
+ Collections.sort(domains, new Comparator<TimelineDomain>() {
+ @Override
+ public int compare(
+ TimelineDomain domain1, TimelineDomain domain2) {
+ int result = domain2.getCreatedTime().compareTo(
+ domain1.getCreatedTime());
+ if (result == 0) {
+ return domain2.getModifiedTime().compareTo(
+ domain1.getModifiedTime());
+ } else {
+ return result;
+ }
+ }
+ });
+ TimelineDomains domainsToReturn = new TimelineDomains();
+ domainsToReturn.addDomains(domains);
+ return domainsToReturn;
+ }
+
+ @Override
+ public synchronized TimelinePutResponse put(TimelineEntities data) {
+ TimelinePutResponse response = new TimelinePutResponse();
+ for (TimelineEntity entity : data.getEntities()) {
+ EntityIdentifier entityId =
+ new EntityIdentifier(entity.getEntityId(), entity.getEntityType());
+ // store entity info in memory
+ TimelineEntity existingEntity = entities.get(entityId);
+ boolean needsPut = false;
+ if (existingEntity == null) {
+ existingEntity = new TimelineEntity();
+ existingEntity.setEntityId(entity.getEntityId());
+ existingEntity.setEntityType(entity.getEntityType());
+ existingEntity.setStartTime(entity.getStartTime());
+ if (entity.getDomainId() == null ||
+ entity.getDomainId().length() == 0) {
+ TimelinePutError error = new TimelinePutError();
+ error.setEntityId(entityId.getId());
+ error.setEntityType(entityId.getType());
+ error.setErrorCode(TimelinePutError.NO_DOMAIN);
+ response.addError(error);
+ continue;
+ }
+ existingEntity.setDomainId(entity.getDomainId());
+ // insert a new entity to the storage, update insert time map
+ entityInsertTimes.put(entityId, System.currentTimeMillis());
+ needsPut = true;
+ }
+ if (entity.getEvents() != null) {
+ if (existingEntity.getEvents() == null) {
+ existingEntity.setEvents(entity.getEvents());
+ } else {
+ existingEntity.addEvents(entity.getEvents());
+ }
+ Collections.sort(existingEntity.getEvents());
+ needsPut = true;
+ }
+ // check startTime
+ if (existingEntity.getStartTime() == null) {
+ if (existingEntity.getEvents() == null
+ || existingEntity.getEvents().isEmpty()) {
+ TimelinePutError error = new TimelinePutError();
+ error.setEntityId(entityId.getId());
+ error.setEntityType(entityId.getType());
+ error.setErrorCode(TimelinePutError.NO_START_TIME);
+ response.addError(error);
+ entities.remove(entityId);
+ entityInsertTimes.remove(entityId);
+ continue;
+ } else {
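+ // derive the missing start time from the earliest event timestamp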
+ Long min = Long.MAX_VALUE;
+ for (TimelineEvent e : entity.getEvents()) {
+ if (min > e.getTimestamp()) {
+ min = e.getTimestamp();
+ }
+ }
+ existingEntity.setStartTime(min);
+ needsPut = true;
+ }
+ }
+ if (entity.getPrimaryFilters() != null) {
+ if (existingEntity.getPrimaryFilters() == null) {
+ existingEntity.setPrimaryFilters(new HashMap<String, Set<Object>>());
+ }
+ for (Entry<String, Set<Object>> pf :
+ entity.getPrimaryFilters().entrySet()) {
+ for (Object pfo : pf.getValue()) {
+ existingEntity.addPrimaryFilter(pf.getKey(), maybeConvert(pfo));
+ needsPut = true;
+ }
+ }
+ }
+ if (entity.getOtherInfo() != null) {
+ if (existingEntity.getOtherInfo() == null) {
+ existingEntity.setOtherInfo(new HashMap<String, Object>());
+ }
+ for (Entry<String, Object> info : entity.getOtherInfo().entrySet()) {
+ existingEntity.addOtherInfo(info.getKey(),
+ maybeConvert(info.getValue()));
+ needsPut = true;
+ }
+ }
+ if (needsPut) {
+ entities.put(entityId, existingEntity);
+ }
+
+ // relate it to other entities
+ if (entity.getRelatedEntities() == null) {
+ continue;
+ }
+ for (Entry<String, Set<String>> partRelatedEntities : entity
+ .getRelatedEntities().entrySet()) {
+ if (partRelatedEntities == null) {
+ continue;
+ }
+ for (String idStr : partRelatedEntities.getValue()) {
+ EntityIdentifier relatedEntityId =
+ new EntityIdentifier(idStr, partRelatedEntities.getKey());
+ TimelineEntity relatedEntity = entities.get(relatedEntityId);
+ if (relatedEntity != null) {
+ if (relatedEntity.getDomainId().equals(
+ existingEntity.getDomainId())) {
+ relatedEntity.addRelatedEntity(
+ existingEntity.getEntityType(), existingEntity.getEntityId());
+ entities.put(relatedEntityId, relatedEntity);
+ } else {
+ // in this case the entity will be put, but the relation will be
+ // ignored
+ TimelinePutError error = new TimelinePutError();
+ error.setEntityType(existingEntity.getEntityType());
+ error.setEntityId(existingEntity.getEntityId());
+ error.setErrorCode(TimelinePutError.FORBIDDEN_RELATION);
+ response.addError(error);
+ }
+ } else {
+ relatedEntity = new TimelineEntity();
+ relatedEntity.setEntityId(relatedEntityId.getId());
+ relatedEntity.setEntityType(relatedEntityId.getType());
+ relatedEntity.setStartTime(existingEntity.getStartTime());
+ relatedEntity.addRelatedEntity(existingEntity.getEntityType(),
+ existingEntity.getEntityId());
+ relatedEntity.setDomainId(existingEntity.getDomainId());
+ entities.put(relatedEntityId, relatedEntity);
+ entityInsertTimes.put(relatedEntityId, System.currentTimeMillis());
+ }
+ }
+ }
+ }
+ return response;
+ }
+
+ public void put(TimelineDomain domain) throws IOException {
+ TimelineDomain domainToReplace =
+ domainById.get(domain.getId());
+ Long currentTimestamp = System.currentTimeMillis();
+ TimelineDomain domainToStore = createTimelineDomain(
+ domain.getId(), domain.getDescription(), domain.getOwner(),
+ domain.getReaders(), domain.getWriters(),
+ (domainToReplace == null ?
+ currentTimestamp : domainToReplace.getCreatedTime()),
+ currentTimestamp);
+ domainById.put(domainToStore.getId(), domainToStore);
+ Set<TimelineDomain> domainsByOneOwner =
+ domainsByOwner.get(domainToStore.getOwner());
+ if (domainsByOneOwner == null) {
+ domainsByOneOwner = new HashSet<TimelineDomain>();
+ domainsByOwner.put(domainToStore.getOwner(), domainsByOneOwner);
+ }
+ if (domainToReplace != null) {
+ domainsByOneOwner.remove(domainToReplace);
+ }
+ domainsByOneOwner.add(domainToStore);
+ }
+
+ private static TimelineDomain createTimelineDomain(
+ String id, String description, String owner,
+ String readers, String writers,
+ Long createdTime, Long modifiedTime) {
+ TimelineDomain domainToStore = new TimelineDomain();
+ domainToStore.setId(id);
+ domainToStore.setDescription(description);
+ domainToStore.setOwner(owner);
+ domainToStore.setReaders(readers);
+ domainToStore.setWriters(writers);
+ domainToStore.setCreatedTime(createdTime);
+ domainToStore.setModifiedTime(modifiedTime);
+ return domainToStore;
+ }
+
+ private static TimelineEntity maskFields(
+ TimelineEntity entity, EnumSet fields) {
+ // Conceal the fields that are not going to be exposed
+ TimelineEntity entityToReturn = new TimelineEntity();
+ entityToReturn.setEntityId(entity.getEntityId());
+ entityToReturn.setEntityType(entity.getEntityType());
+ entityToReturn.setStartTime(entity.getStartTime());
+ entityToReturn.setDomainId(entity.getDomainId());
+ // Deep copy
+ if (fields.contains(Field.EVENTS)) {
+ entityToReturn.addEvents(entity.getEvents());
+ } else if (fields.contains(Field.LAST_EVENT_ONLY)) {
+ entityToReturn.addEvent(entity.getEvents().get(0));
+ } else {
+ entityToReturn.setEvents(null);
+ }
+ if (fields.contains(Field.RELATED_ENTITIES)) {
+ entityToReturn.addRelatedEntities(entity.getRelatedEntities());
+ } else {
+ entityToReturn.setRelatedEntities(null);
+ }
+ if (fields.contains(Field.PRIMARY_FILTERS)) {
+ entityToReturn.addPrimaryFilters(entity.getPrimaryFilters());
+ } else {
+ entityToReturn.setPrimaryFilters(null);
+ }
+ if (fields.contains(Field.OTHER_INFO)) {
+ entityToReturn.addOtherInfo(entity.getOtherInfo());
+ } else {
+ entityToReturn.setOtherInfo(null);
+ }
+ return entityToReturn;
+ }
+
+ private static boolean matchFilter(Map<String, Object> tags,
+ NameValuePair filter) {
+ Object value = tags.get(filter.getName());
+ if (value == null) { // doesn't have the filter
+ return false;
+ } else if (!value.equals(filter.getValue())) { // doesn't match the filter
+ return false;
+ }
+ return true;
+ }
+
+ private static boolean matchPrimaryFilter(Map<String, Set<Object>> tags,
+ NameValuePair filter) {
+ Set