diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timeline/TimelineEntity.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timeline/TimelineEntity.java index 20304bd..611b763 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timeline/TimelineEntity.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timeline/TimelineEntity.java @@ -64,6 +64,7 @@ new HashMap>(); private Map otherInfo = new HashMap(); + private String namespaceId; public TimelineEntity() { @@ -325,6 +326,26 @@ public void setOtherInfo(Map otherInfo) { this.otherInfo = otherInfo; } + /** + * Get the ID of the namespace into which the entity is to be put + * + * @return the namespace ID + */ + @XmlElement(name = "namespace") + public String getNamespaceId() { + return namespaceId; + } + + /** + * Set the ID of the namespace into which the entity is to be put + * + * @param namespaceId + * the namespace ID + */ + public void setNamespaceId(String namespaceId) { + this.namespaceId = namespaceId; + } + @Override public int hashCode() { // generated by eclipse diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timeline/TimelinePutResponse.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timeline/TimelinePutResponse.java index 77a97ba..7eb910c 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timeline/TimelinePutResponse.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timeline/TimelinePutResponse.java @@ -118,6 +118,17 @@ public void setErrors(List errors) { */ public static final int ACCESS_DENIED = 4; + /** + * Error code returned if the entity doesn't have a valid namespace ID + */ + public static final int NO_NAMESPACE = 5; + + /** + * Error code returned if the user is not allowed to relate the entity to another + * one in a different namespace + */ + public static final int FORBIDDEN_RELATION = 6; + private String entityId; private String entityType; private int errorCode; diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/records/timeline/TestTimelineRecords.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/records/timeline/TestTimelineRecords.java index 8d89b73..2640137 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/records/timeline/TestTimelineRecords.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/records/timeline/TestTimelineRecords.java @@ -61,6 +61,7 @@ public void testEntities() throws Exception { entity.addPrimaryFilter("pkey2", "pval2"); entity.addOtherInfo("okey1", "oval1"); entity.addOtherInfo("okey2", "oval2"); + entity.setNamespaceId("namespace id " + j); entities.addEntity(entity); } LOG.info("Entities in JSON:"); @@ -74,6 +75,7 @@ public void testEntities() throws Exception { Assert.assertEquals(2, entity1.getEvents().size()); Assert.assertEquals(2, entity1.getPrimaryFilters().size()); Assert.assertEquals(2, entity1.getOtherInfo().size()); + Assert.assertEquals("namespace id 0", entity1.getNamespaceId()); TimelineEntity entity2 = entities.getEntities().get(1);
Assert.assertEquals("entity id 1", entity2.getEntityId()); Assert.assertEquals("entity type 1", entity2.getEntityType()); @@ -81,6 +83,7 @@ public void testEntities() throws Exception { Assert.assertEquals(2, entity2.getEvents().size()); Assert.assertEquals(2, entity2.getPrimaryFilters().size()); Assert.assertEquals(2, entity2.getOtherInfo().size()); + Assert.assertEquals("namespace id 1", entity2.getNamespaceId()); } @Test diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestTimelineClient.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestTimelineClient.java index e95187d..bd88cfe 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestTimelineClient.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestTimelineClient.java @@ -240,6 +240,7 @@ private static TimelineEntity generateEntity() { entity.addPrimaryFilter("pkey2", "pval2"); entity.addOtherInfo("okey1", "oval1"); entity.addOtherInfo("okey2", "oval2"); + entity.setNamespaceId("namespace id 1"); return entity; } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java index c61b80e..fdeb33e 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java @@ -81,6 +81,7 @@ protected void serviceInit(Configuration conf) throws Exception { secretManagerService = createTimelineDelegationTokenSecretManagerService(conf); addService(secretManagerService); timelineDataManager = createTimelineDataManager(conf); + addService(timelineDataManager); // init generic history service afterwards historyManager = createApplicationHistoryManager(conf); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/LeveldbTimelineStore.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/LeveldbTimelineStore.java index 8d4fdbd..aae25e0 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/LeveldbTimelineStore.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/LeveldbTimelineStore.java @@ -113,6 +113,9 @@ * RELATED_ENTITIES_COLUMN + relatedentity type + relatedentity id * * ENTITY_ENTRY_PREFIX + entity type + revstarttime + entity id + + * NAMESPACE_ID_COLUMN + * + * ENTITY_ENTRY_PREFIX + entity type + revstarttime + entity id + * INVISIBLE_REVERSE_RELATED_ENTITIES_COLUMN + relatedentity type + * relatedentity id * @@ -146,6 
+149,7 @@ private static final byte[] RELATED_ENTITIES_COLUMN = "r".getBytes(); private static final byte[] INVISIBLE_REVERSE_RELATED_ENTITIES_COLUMN = "z".getBytes(); + private static final byte[] NAMESPACE_ID_COLUMN = "n".getBytes(); private static final byte[] NAMESPACE_ENTRY_PREFIX = "n".getBytes(); private static final byte[] OWNER_LOOKUP_PREFIX = "o".getBytes(); @@ -521,6 +525,10 @@ private static TimelineEntity getEntity(String entityId, String entityType, entity.addEvent(event); } } + } else if (key[prefixlen] == NAMESPACE_ID_COLUMN[0]) { + byte[] v = iterator.peekNext().getValue(); + String namespaceId = new String(v); + entity.setNamespaceId(namespaceId); } else { if (key[prefixlen] != INVISIBLE_REVERSE_RELATED_ENTITIES_COLUMN[0]) { @@ -793,6 +801,7 @@ private void put(TimelineEntity entity, TimelinePutResponse response) { List relatedEntitiesWithoutStartTimes = new ArrayList(); byte[] revStartTime = null; + Map> primaryFilters = null; try { writeBatch = db.createWriteBatch(); List events = entity.getEvents(); @@ -812,7 +821,7 @@ private void put(TimelineEntity entity, TimelinePutResponse response) { revStartTime = writeReverseOrderedLong(startAndInsertTime .startTime); - Map> primaryFilters = entity.getPrimaryFilters(); + primaryFilters = entity.getPrimaryFilters(); // write entity marker byte[] markerKey = createEntityMarkerKey(entity.getEntityId(), @@ -857,6 +866,21 @@ private void put(TimelineEntity entity, TimelinePutResponse response) { relatedEntitiesWithoutStartTimes.add( new EntityIdentifier(relatedEntityId, relatedEntityType)); continue; + } else { + byte[] namespaceIdBytes = db.get(createNamespaceIdKey( + relatedEntityId, relatedEntityType, relatedEntityStartTime)); + // This is the existing entity + String namespaceId = new String(namespaceIdBytes); + if (!namespaceId.equals(entity.getNamespaceId())) { + // in this case the entity will be put, but the relation will be + // ignored + TimelinePutError error = new TimelinePutError(); + error.setEntityId(entity.getEntityId()); + error.setEntityType(entity.getEntityType()); + error.setErrorCode(TimelinePutError.FORBIDDEN_RELATION); + response.addError(error); + continue; + } } // write "forward" entry (related entity -> entity) key = createRelatedEntityKey(relatedEntityId, @@ -893,6 +917,23 @@ private void put(TimelineEntity entity, TimelinePutResponse response) { writePrimaryFilterEntries(writeBatch, primaryFilters, key, value); } } + + // write namespace id entry + byte[] key = createNamespaceIdKey(entity.getEntityId(), + entity.getEntityType(), revStartTime); + if (entity.getNamespaceId() == null || + entity.getNamespaceId().length() == 0) { + TimelinePutError error = new TimelinePutError(); + error.setEntityId(entity.getEntityId()); + error.setEntityType(entity.getEntityType()); + error.setErrorCode(TimelinePutError.NO_NAMESPACE); + response.addError(error); + return; + } else { + writeBatch.put(key, entity.getNamespaceId().getBytes()); + writePrimaryFilterEntries(writeBatch, primaryFilters, key, + entity.getNamespaceId().getBytes()); + } db.write(writeBatch); } catch (IOException e) { LOG.error("Error putting entity " + entity.getEntityId() + @@ -920,6 +961,10 @@ private void put(TimelineEntity entity, TimelinePutResponse response) { } byte[] relatedEntityStartTime = writeReverseOrderedLong( relatedEntityStartAndInsertTime.startTime); + // This is the new entity, the namespace should be the same + byte[] key = createNamespaceIdKey(relatedEntity.getId(), + relatedEntity.getType(), relatedEntityStartTime); + db.put(key, 
entity.getNamespaceId().getBytes()); db.put(createRelatedEntityKey(relatedEntity.getId(), relatedEntity.getType(), relatedEntityStartTime, entity.getEntityId(), entity.getEntityType()), EMPTY_BYTES); @@ -960,6 +1005,23 @@ private static void writePrimaryFilterEntries(WriteBatch writeBatch, } } + /** + * For a given key / value pair that has been written to the db, + * write additional entries to the db for each primary filter. + */ + private static void writePrimaryFilterEntries(DB db, + Map> primaryFilters, byte[] key, byte[] value) + throws IOException { + if (primaryFilters != null && !primaryFilters.isEmpty()) { + for (Entry> pf : primaryFilters.entrySet()) { + for (Object pfval : pf.getValue()) { + db.put(addPrimaryFilterToKey(pf.getKey(), pfval, + key), value); + } + } + } + } + @Override public TimelinePutResponse put(TimelineEntities entities) { try { @@ -1266,6 +1328,15 @@ private static void addRelatedEntity(TimelineEntity entity, byte[] key, } /** + * Creates a namespace id key, serializing ENTITY_ENTRY_PREFIX + + * entity type + revstarttime + entity id + NAMESPACE_ID_COLUMN. + */ + private static byte[] createNamespaceIdKey(String entityId, + String entityType, byte[] revStartTime) throws IOException { + return KeyBuilder.newInstance().add(ENTITY_ENTRY_PREFIX).add(entityType) + .add(revStartTime).add(entityId).add(NAMESPACE_ID_COLUMN).getBytes(); + } + /** * Clears the cache to test reloading start times from leveldb (only for * testing). */ diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/MemoryTimelineStore.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/MemoryTimelineStore.java index 9bd7a12..4320f0d 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/MemoryTimelineStore.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/MemoryTimelineStore.java @@ -282,6 +282,16 @@ public TimelinePutResponse put(TimelineEntities data) { existingEntity.setEntityId(entity.getEntityId()); existingEntity.setEntityType(entity.getEntityType()); existingEntity.setStartTime(entity.getStartTime()); + if (entity.getNamespaceId() == null || + entity.getNamespaceId().length() == 0) { + TimelinePutError error = new TimelinePutError(); + error.setEntityId(entityId.getId()); + error.setEntityType(entityId.getType()); + error.setErrorCode(TimelinePutError.NO_NAMESPACE); + response.addError(error); + continue; + } + existingEntity.setNamespaceId(entity.getNamespaceId()); entities.put(entityId, existingEntity); entityInsertTimes.put(entityId, System.currentTimeMillis()); } @@ -349,8 +359,19 @@ public TimelinePutResponse put(TimelineEntities data) { new EntityIdentifier(idStr, partRelatedEntities.getKey()); TimelineEntity relatedEntity = entities.get(relatedEntityId); if (relatedEntity != null) { - relatedEntity.addRelatedEntity( - existingEntity.getEntityType(), existingEntity.getEntityId()); + if (relatedEntity.getNamespaceId().equals( + existingEntity.getNamespaceId())) { + relatedEntity.addRelatedEntity( + existingEntity.getEntityType(), existingEntity.getEntityId()); + } else { + // in this case the entity will be put, but the relation will be + // ignored + TimelinePutError 
error = new TimelinePutError(); + error.setEntityType(existingEntity.getEntityType()); + error.setEntityId(existingEntity.getEntityId()); + error.setErrorCode(TimelinePutError.FORBIDDEN_RELATION); + response.addError(error); + } } else { relatedEntity = new TimelineEntity(); relatedEntity.setEntityId(relatedEntityId.getId()); @@ -358,6 +379,7 @@ public TimelinePutResponse put(TimelineEntities data) { relatedEntity.setStartTime(existingEntity.getStartTime()); relatedEntity.addRelatedEntity(existingEntity.getEntityType(), existingEntity.getEntityId()); + relatedEntity.setNamespaceId(existingEntity.getNamespaceId()); entities.put(relatedEntityId, relatedEntity); entityInsertTimes.put(relatedEntityId, System.currentTimeMillis()); } @@ -412,6 +434,7 @@ private static TimelineEntity maskFields( entityToReturn.setEntityId(entity.getEntityId()); entityToReturn.setEntityType(entity.getEntityType()); entityToReturn.setStartTime(entity.getStartTime()); + entityToReturn.setNamespaceId(entity.getNamespaceId()); // Deep copy if (fields.contains(Field.EVENTS)) { entityToReturn.addEvents(entity.getEvents()); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/TimelineDataManager.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/TimelineDataManager.java index 217fbe4..8f0e252 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/TimelineDataManager.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/TimelineDataManager.java @@ -30,7 +30,10 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.security.UserGroupInformation; +import org.apache.hadoop.service.AbstractService; +import org.apache.hadoop.yarn.api.records.ApplicationAccessType; import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities; import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity; import org.apache.hadoop.yarn.api.records.timeline.TimelineEvents; @@ -42,23 +45,47 @@ import org.apache.hadoop.yarn.server.timeline.security.TimelineACLsManager; import org.apache.hadoop.yarn.util.timeline.TimelineUtils; +import com.google.common.annotations.VisibleForTesting; + /** * The class wrap over the timeline store and the ACLs manager. It does some non * trivial manipulation of the timeline data before putting or after getting it * from the timeline store, and checks the user's access to it. 
* */ -public class TimelineDataManager { +public class TimelineDataManager extends AbstractService { private static final Log LOG = LogFactory.getLog(TimelineDataManager.class); + @VisibleForTesting + public static final String DEFAULT_NAMESPACE_ID = "DEFAULT"; private TimelineStore store; private TimelineACLsManager timelineACLsManager; public TimelineDataManager(TimelineStore store, TimelineACLsManager timelineACLsManager) { + super(TimelineDataManager.class.getName()); this.store = store; this.timelineACLsManager = timelineACLsManager; + timelineACLsManager.setTimelineStore(store); + } + + @Override + protected void serviceInit(Configuration conf) throws Exception { + TimelineNamespace namespace = store.getNamespace("DEFAULT"); + if (namespace == null) { + // create a default namespace, which allows everybody to access and + // modify the entities in it. + namespace = new TimelineNamespace(); + namespace.setId(DEFAULT_NAMESPACE_ID); + namespace.setDescription("System Default Namespace"); + namespace.setOwner( + UserGroupInformation.getCurrentUser().getShortUserName()); + namespace.setReaders("*"); + namespace.setWriters("*"); + store.put(namespace); + } + super.serviceInit(conf); } /** @@ -98,7 +125,8 @@ public TimelineEntities getEntities( TimelineEntity entity = entitiesItr.next(); try { // check ACLs - if (!timelineACLsManager.checkAccess(callerUGI, entity)) { + if (!timelineACLsManager.checkAccess( + callerUGI, ApplicationAccessType.VIEW_APP, entity)) { entitiesItr.remove(); } else { // clean up system data @@ -141,7 +169,8 @@ public TimelineEntity getEntity( store.getEntity(entityId, entityType, fields); if (entity != null) { // check ACLs - if (!timelineACLsManager.checkAccess(callerUGI, entity)) { + if (!timelineACLsManager.checkAccess( + callerUGI, ApplicationAccessType.VIEW_APP, entity)) { entity = null; } else { // clean up the system data @@ -189,7 +218,8 @@ public TimelineEvents getEvents( eventsOfOneEntity.getEntityType(), EnumSet.of(Field.PRIMARY_FILTERS)); // check ACLs - if (!timelineACLsManager.checkAccess(callerUGI, entity)) { + if (!timelineACLsManager.checkAccess( + callerUGI, ApplicationAccessType.VIEW_APP, entity)) { eventsItr.remove(); } } catch (Exception e) { @@ -225,16 +255,29 @@ public TimelinePutResponse postEntities( EntityIdentifier entityID = new EntityIdentifier(entity.getEntityId(), entity.getEntityType()); + // if the namespace id is not specified, the entity will be put into + // the default namespace + if (entity.getNamespaceId() == null || + entity.getNamespaceId().length() == 0) { + entity.setNamespaceId(DEFAULT_NAMESPACE_ID); + } + // check if there is existing entity TimelineEntity existingEntity = null; try { existingEntity = store.getEntity(entityID.getId(), entityID.getType(), EnumSet.of(Field.PRIMARY_FILTERS)); - if (existingEntity != null - && !timelineACLsManager.checkAccess(callerUGI, existingEntity)) { - throw new YarnException("The timeline entity " + entityID - + " was not put by " + callerUGI + " before"); + if (existingEntity != null && + !existingEntity.getNamespaceId().equals(entity.getNamespaceId())) { + throw new YarnException("The namespace of the timeline entity " + + entityID + " is not allowed to be changed."); + } + if (!timelineACLsManager.checkAccess( + callerUGI, ApplicationAccessType.MODIFY_APP, entity)) { + throw new YarnException(callerUGI + + " is not allowed to put the timeline entity " + entityID + + " into the namespace " + entity.getNamespaceId() + "."); } } catch (Exception e) { // Skip the entity which already 
exists and was put by others @@ -307,6 +350,11 @@ public void putNamespace(TimelineNamespace namespace, namespace.setOwner(existingNamespace.getOwner()); } store.put(namespace); + // If the namespace exists already, it is likely to be in the cache. + // We need to invalidate it. + if (existingNamespace != null) { + timelineACLsManager.invalidate(namespace); + } } /** diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/security/TimelineACLsManager.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/security/TimelineACLsManager.java index 4f6a99a..8c99205 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/security/TimelineACLsManager.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/security/TimelineACLsManager.java @@ -19,19 +19,26 @@ package org.apache.hadoop.yarn.server.timeline.security; import java.io.IOException; -import java.util.Set; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import org.apache.commons.collections.map.LRUMap; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.security.UserGroupInformation; +import org.apache.hadoop.security.authorize.AccessControlList; +import org.apache.hadoop.yarn.api.records.ApplicationAccessType; import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity; import org.apache.hadoop.yarn.api.records.timeline.TimelineNamespace; +import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.security.AdminACLsManager; import org.apache.hadoop.yarn.server.timeline.EntityIdentifier; -import org.apache.hadoop.yarn.server.timeline.TimelineStore.SystemFilter; +import org.apache.hadoop.yarn.server.timeline.TimelineStore; +import org.apache.hadoop.yarn.util.StringHelper; import com.google.common.annotations.VisibleForTesting; @@ -42,14 +49,58 @@ public class TimelineACLsManager { private static final Log LOG = LogFactory.getLog(TimelineACLsManager.class); + private static final int NAMESPACE_ACCESS_ENTRY_CACHE_SIZE = 100; private AdminACLsManager adminAclsManager; + private Map aclExts; + private TimelineStore store; + @SuppressWarnings("unchecked") public TimelineACLsManager(Configuration conf) { this.adminAclsManager = new AdminACLsManager(conf); + aclExts = Collections.synchronizedMap( + new LRUMap(NAMESPACE_ACCESS_ENTRY_CACHE_SIZE)); + } + + public void setTimelineStore(TimelineStore store) { + this.store = store; + } + + private AccessControlListExt loadNamespaceFromTimelineStore( + String namespaceId) throws IOException { + if (store == null) { + return null; + } + TimelineNamespace namespace = store.getNamespace(namespaceId); + if (namespace == null) { + return null; + } else { + return putNamespaceIntoCache(namespace); + } + } + + public void invalidate(TimelineNamespace namespace) { + if (aclExts.containsKey(namespace.getId())) { + putNamespaceIntoCache(namespace); + } + } + + private AccessControlListExt putNamespaceIntoCache( + 
TimelineNamespace namespace) { + Map acls + = new HashMap(2); + acls.put(ApplicationAccessType.VIEW_APP, + new AccessControlList(StringHelper.cjoin(namespace.getReaders()))); + acls.put(ApplicationAccessType.MODIFY_APP, + new AccessControlList(StringHelper.cjoin(namespace.getWriters()))); + AccessControlListExt aclExt = + new AccessControlListExt(namespace.getOwner(), acls); + aclExts.put(namespace.getId(), aclExt); + return aclExt; } public boolean checkAccess(UserGroupInformation callerUGI, + ApplicationAccessType applicationAccessType, TimelineEntity entity) throws YarnException, IOException { if (LOG.isDebugEnabled()) { LOG.debug("Verifying the access of " @@ -62,21 +113,33 @@ public boolean checkAccess(UserGroupInformation callerUGI, return true; } - Set values = - entity.getPrimaryFilters().get( - SystemFilter.ENTITY_OWNER.toString()); - if (values == null || values.size() != 1) { - throw new YarnException("Owner information of the timeline entity " + // find namespace owner and acls + AccessControlListExt aclExt = aclExts.get(entity.getNamespaceId()); + if (aclExt == null) { + aclExt = loadNamespaceFromTimelineStore(entity.getNamespaceId()); + } + if (aclExt == null) { + throw new YarnException("Namespace information of the timeline entity " + new EntityIdentifier(entity.getEntityId(), entity.getEntityType()) - + " is corrupted."); + + " doesn't exist."); } - String owner = values.iterator().next().toString(); - // TODO: Currently we just check the user is the admin or the timeline - // entity owner. In the future, we need to check whether the user is in the - // allowed user/group list + String owner = aclExt.owner; + AccessControlList namespaceACL = aclExt.acls.get(applicationAccessType); + if (namespaceACL == null) { + if (LOG.isDebugEnabled()) { + LOG.debug("ACL not found for access-type " + applicationAccessType + + " for namespace " + entity.getNamespaceId() + " owned by " + + owner + ". 
Using default [" + + YarnConfiguration.DEFAULT_YARN_APP_ACL + "]"); + } + namespaceACL = + new AccessControlList(YarnConfiguration.DEFAULT_YARN_APP_ACL); + } + if (callerUGI != null && (adminAclsManager.isAdmin(callerUGI) || - callerUGI.getShortUserName().equals(owner))) { + callerUGI.getShortUserName().equals(owner) || + namespaceACL.isUserAllowed(callerUGI))) { return true; } return false; @@ -116,4 +179,14 @@ public boolean checkAccess(UserGroupInformation callerUGI, return oldAdminACLsManager; } + private static class AccessControlListExt { + private String owner; + private Map acls; + + public AccessControlListExt( + String owner, Map acls) { + this.owner = owner; + this.acls = acls; + } + } } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryServer.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryServer.java index bcd8e45..2495357 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryServer.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryServer.java @@ -30,7 +30,6 @@ import org.apache.hadoop.yarn.server.applicationhistoryservice.webapp.AHSWebApp; import org.apache.hadoop.yarn.server.timeline.security.TimelineAuthenticationFilterInitializer; import org.junit.After; -import org.junit.Assert; import org.junit.Test; public class TestApplicationHistoryServer { @@ -44,7 +43,7 @@ public void testStartStopServer() throws Exception { Configuration config = new YarnConfiguration(); historyServer.init(config); assertEquals(STATE.INITED, historyServer.getServiceState()); - assertEquals(4, historyServer.getServices().size()); + assertEquals(5, historyServer.getServices().size()); ApplicationHistoryClientService historyService = historyServer.getClientService(); assertNotNull(historyServer.getClientService()); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/TestLeveldbTimelineStore.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/TestLeveldbTimelineStore.java index b752bcc..a2432a5 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/TestLeveldbTimelineStore.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/TestLeveldbTimelineStore.java @@ -160,21 +160,22 @@ private boolean deleteNextEntity(String entityType, byte[] ts) @Test public void testGetEntityTypes() throws IOException { List entityTypes = ((LeveldbTimelineStore)store).getEntityTypes(); - assertEquals(4, entityTypes.size()); + assertEquals(5, entityTypes.size()); assertEquals(entityType1, entityTypes.get(0)); assertEquals(entityType2, entityTypes.get(1)); assertEquals(entityType4, entityTypes.get(2)); assertEquals(entityType5, 
entityTypes.get(3)); + assertEquals(entityType7, entityTypes.get(4)); } @Test public void testDeleteEntities() throws IOException, InterruptedException { - assertEquals(2, getEntities("type_1").size()); + assertEquals(3, getEntities("type_1").size()); assertEquals(1, getEntities("type_2").size()); assertEquals(false, deleteNextEntity(entityType1, - writeReverseOrderedLong(122l))); - assertEquals(2, getEntities("type_1").size()); + writeReverseOrderedLong(60l))); + assertEquals(3, getEntities("type_1").size()); assertEquals(1, getEntities("type_2").size()); assertEquals(true, deleteNextEntity(entityType1, @@ -183,16 +184,19 @@ public void testDeleteEntities() throws IOException, InterruptedException { assertEquals(1, entities.size()); verifyEntityInfo(entityId2, entityType2, events2, Collections.singletonMap( entityType1, Collections.singleton(entityId1b)), EMPTY_PRIMARY_FILTERS, - EMPTY_MAP, entities.get(0)); + EMPTY_MAP, entities.get(0), namespaceId1); entities = getEntitiesWithPrimaryFilter("type_1", userFilter); - assertEquals(1, entities.size()); + assertEquals(2, entities.size()); verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(0)); + primaryFilters, otherInfo, entities.get(0), namespaceId1); + // can retrieve entities across namespaces + verifyEntityInfo(entityId6, entityType1, EMPTY_EVENTS, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, entities.get(1), namespaceId2); ((LeveldbTimelineStore)store).discardOldEntities(-123l); - assertEquals(1, getEntities("type_1").size()); + assertEquals(2, getEntities("type_1").size()); assertEquals(0, getEntities("type_2").size()); - assertEquals(3, ((LeveldbTimelineStore)store).getEntityTypes().size()); + assertEquals(4, ((LeveldbTimelineStore)store).getEntityTypes().size()); ((LeveldbTimelineStore)store).discardOldEntities(123l); assertEquals(0, getEntities("type_1").size()); @@ -210,7 +214,7 @@ public void testDeleteEntitiesPrimaryFilters() TimelineEntities atsEntities = new TimelineEntities(); atsEntities.setEntities(Collections.singletonList(createEntity(entityId1b, entityType1, 789l, Collections.singletonList(ev2), null, primaryFilter, - null))); + null, namespaceId1))); TimelinePutResponse response = store.put(atsEntities); assertEquals(0, response.getErrors().size()); @@ -219,18 +223,21 @@ public void testDeleteEntitiesPrimaryFilters() pfPair); assertEquals(1, entities.size()); verifyEntityInfo(entityId1b, entityType1, Collections.singletonList(ev2), - EMPTY_REL_ENTITIES, primaryFilter, EMPTY_MAP, entities.get(0)); + EMPTY_REL_ENTITIES, primaryFilter, EMPTY_MAP, entities.get(0), + namespaceId1); entities = getEntitiesWithPrimaryFilter("type_1", userFilter); - assertEquals(2, entities.size()); + assertEquals(3, entities.size()); verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(0)); + primaryFilters, otherInfo, entities.get(0), namespaceId1); verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(1)); + primaryFilters, otherInfo, entities.get(1), namespaceId1); + verifyEntityInfo(entityId6, entityType1, EMPTY_EVENTS, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, entities.get(2), namespaceId2); ((LeveldbTimelineStore)store).discardOldEntities(-123l); assertEquals(1, getEntitiesWithPrimaryFilter("type_1", pfPair).size()); - assertEquals(2, getEntitiesWithPrimaryFilter("type_1", userFilter).size()); + assertEquals(3, getEntitiesWithPrimaryFilter("type_1", 
userFilter).size()); ((LeveldbTimelineStore)store).discardOldEntities(123l); assertEquals(0, getEntities("type_1").size()); @@ -245,9 +252,9 @@ public void testDeleteEntitiesPrimaryFilters() public void testFromTsWithDeletion() throws IOException, InterruptedException { long l = System.currentTimeMillis(); - assertEquals(2, getEntitiesFromTs("type_1", l).size()); + assertEquals(3, getEntitiesFromTs("type_1", l).size()); assertEquals(1, getEntitiesFromTs("type_2", l).size()); - assertEquals(2, getEntitiesFromTsWithPrimaryFilter("type_1", userFilter, + assertEquals(3, getEntitiesFromTsWithPrimaryFilter("type_1", userFilter, l).size()); ((LeveldbTimelineStore)store).discardOldEntities(123l); assertEquals(0, getEntitiesFromTs("type_1", l).size()); @@ -263,9 +270,9 @@ public void testFromTsWithDeletion() assertEquals(0, getEntitiesFromTs("type_2", l).size()); assertEquals(0, getEntitiesFromTsWithPrimaryFilter("type_1", userFilter, l).size()); - assertEquals(2, getEntities("type_1").size()); + assertEquals(3, getEntities("type_1").size()); assertEquals(1, getEntities("type_2").size()); - assertEquals(2, getEntitiesWithPrimaryFilter("type_1", userFilter).size()); + assertEquals(3, getEntitiesWithPrimaryFilter("type_1", userFilter).size()); } @Test diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/TimelineStoreTestUtils.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/TimelineStoreTestUtils.java index da955f4..2ef6de8 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/TimelineStoreTestUtils.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/TimelineStoreTestUtils.java @@ -66,6 +66,10 @@ protected String entityType4; protected String entityId5; protected String entityType5; + protected String entityId6; + protected String entityId7; + protected String entityType7; + protected Map> primaryFilters; protected Map secondaryFilters; protected Map allFilters; @@ -86,6 +90,8 @@ protected List events1; protected List events2; protected long beforeTs; + protected String namespaceId1; + protected String namespaceId2; /** * Load test entity data into the given store @@ -123,6 +129,9 @@ protected void loadTestEntityData() throws IOException { String entityType4 = "type_4"; String entityId5 = "id_5"; String entityType5 = "type_5"; + String entityId6 = "id_6"; + String entityId7 = "id_7"; + String entityType7 = "type_7"; Map> relatedEntities = new HashMap>(); @@ -134,19 +143,19 @@ protected void loadTestEntityData() throws IOException { events.add(ev3); events.add(ev4); entities.setEntities(Collections.singletonList(createEntity(entityId2, - entityType2, null, events, null, null, null))); + entityType2, null, events, null, null, null, "namespace_id_1"))); TimelinePutResponse response = store.put(entities); assertEquals(0, response.getErrors().size()); TimelineEvent ev1 = createEvent(123l, "start_event", null); entities.setEntities(Collections.singletonList(createEntity(entityId1, entityType1, 123l, Collections.singletonList(ev1), - relatedEntities, primaryFilters, otherInfo1))); + relatedEntities, primaryFilters, otherInfo1, "namespace_id_1"))); response = store.put(entities); 
assertEquals(0, response.getErrors().size()); entities.setEntities(Collections.singletonList(createEntity(entityId1b, entityType1, null, Collections.singletonList(ev1), relatedEntities, - primaryFilters, otherInfo1))); + primaryFilters, otherInfo1, "namespace_id_1"))); response = store.put(entities); assertEquals(0, response.getErrors().size()); @@ -157,17 +166,18 @@ protected void loadTestEntityData() throws IOException { otherInfo2.put("info2", "val2"); entities.setEntities(Collections.singletonList(createEntity(entityId1, entityType1, null, Collections.singletonList(ev2), null, - primaryFilters, otherInfo2))); + primaryFilters, otherInfo2, "namespace_id_1"))); response = store.put(entities); assertEquals(0, response.getErrors().size()); entities.setEntities(Collections.singletonList(createEntity(entityId1b, entityType1, 789l, Collections.singletonList(ev2), null, - primaryFilters, otherInfo2))); + primaryFilters, otherInfo2, "namespace_id_1"))); response = store.put(entities); assertEquals(0, response.getErrors().size()); entities.setEntities(Collections.singletonList(createEntity( - "badentityid", "badentity", null, null, null, null, otherInfo1))); + "badentityid", "badentity", null, null, null, null, otherInfo1, + "namespace_id_1"))); response = store.put(entities); assertEquals(1, response.getErrors().size()); TimelinePutError error = response.getErrors().get(0); @@ -178,9 +188,28 @@ protected void loadTestEntityData() throws IOException { relatedEntities.clear(); relatedEntities.put(entityType5, Collections.singleton(entityId5)); entities.setEntities(Collections.singletonList(createEntity(entityId4, - entityType4, 42l, null, relatedEntities, null, null))); + entityType4, 42l, null, relatedEntities, null, null, + "namespace_id_1"))); response = store.put(entities); - assertEquals(0, response.getErrors().size()); + + relatedEntities.clear(); + otherInfo1.put("info2", "val2"); + entities.setEntities(Collections.singletonList(createEntity(entityId6, + entityType1, 61l, null, relatedEntities, primaryFilters, otherInfo1, + "namespace_id_2"))); + response = store.put(entities); + + relatedEntities.clear(); + relatedEntities.put(entityType1, Collections.singleton(entityId1)); + entities.setEntities(Collections.singletonList(createEntity(entityId7, + entityType7, 62l, null, relatedEntities, null, null, + "namespace_id_2"))); + response = store.put(entities); + assertEquals(1, response.getErrors().size()); + assertEquals(entityType7, response.getErrors().get(0).getEntityType()); + assertEquals(entityId7, response.getErrors().get(0).getEntityId()); + assertEquals(TimelinePutError.FORBIDDEN_RELATION, + response.getErrors().get(0).getErrorCode()); } /** @@ -235,6 +264,9 @@ protected void loadVerificationEntityData() throws Exception { entityType4 = "type_4"; entityId5 = "id_5"; entityType5 = "type_5"; + entityId6 = "id_6"; + entityId7 = "id_7"; + entityType7 = "type_7"; ev1 = createEvent(123l, "start_event", null); @@ -261,6 +293,9 @@ protected void loadVerificationEntityData() throws Exception { events2 = new ArrayList(); events2.add(ev3); events2.add(ev4); + + namespaceId1 = "namespace_id_1"; + namespaceId2 = "namespace_id_2"; } private TimelineNamespace namespace1; @@ -282,7 +317,7 @@ protected void loadTestNamespaceData() throws IOException { namespace2.setDescription("description_2"); namespace2.setOwner("owner_2"); namespace2.setReaders("reader_user_2 reader_group_2"); - namespace2.setWriters("writer_user_2writer_group_2"); + namespace2.setWriters("writer_user_2 writer_group_2"); 
store.put(namespace2); // Wait a second before updating the namespace information @@ -311,50 +346,62 @@ protected void loadTestNamespaceData() throws IOException { public void testGetSingleEntity() throws IOException { // test getting entity info verifyEntityInfo(null, null, null, null, null, null, - store.getEntity("id_1", "type_2", EnumSet.allOf(Field.class))); + store.getEntity("id_1", "type_2", EnumSet.allOf(Field.class)), + namespaceId1); verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES, primaryFilters, otherInfo, 123l, store.getEntity(entityId1, - entityType1, EnumSet.allOf(Field.class))); + entityType1, EnumSet.allOf(Field.class)), namespaceId1); verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES, primaryFilters, otherInfo, 123l, store.getEntity(entityId1b, - entityType1, EnumSet.allOf(Field.class))); + entityType1, EnumSet.allOf(Field.class)), namespaceId1); verifyEntityInfo(entityId2, entityType2, events2, relEntityMap, EMPTY_PRIMARY_FILTERS, EMPTY_MAP, -123l, store.getEntity(entityId2, - entityType2, EnumSet.allOf(Field.class))); + entityType2, EnumSet.allOf(Field.class)), namespaceId1); verifyEntityInfo(entityId4, entityType4, EMPTY_EVENTS, EMPTY_REL_ENTITIES, EMPTY_PRIMARY_FILTERS, EMPTY_MAP, 42l, store.getEntity(entityId4, - entityType4, EnumSet.allOf(Field.class))); + entityType4, EnumSet.allOf(Field.class)), namespaceId1); verifyEntityInfo(entityId5, entityType5, EMPTY_EVENTS, relEntityMap2, EMPTY_PRIMARY_FILTERS, EMPTY_MAP, 42l, store.getEntity(entityId5, - entityType5, EnumSet.allOf(Field.class))); + entityType5, EnumSet.allOf(Field.class)), namespaceId1); // test getting single fields verifyEntityInfo(entityId1, entityType1, events1, null, null, null, - store.getEntity(entityId1, entityType1, EnumSet.of(Field.EVENTS))); + store.getEntity(entityId1, entityType1, EnumSet.of(Field.EVENTS)), + namespaceId1); verifyEntityInfo(entityId1, entityType1, Collections.singletonList(ev2), null, null, null, store.getEntity(entityId1, entityType1, - EnumSet.of(Field.LAST_EVENT_ONLY))); + EnumSet.of(Field.LAST_EVENT_ONLY)), namespaceId1); verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES, primaryFilters, otherInfo, store.getEntity(entityId1b, entityType1, - null)); + null), namespaceId1); verifyEntityInfo(entityId1, entityType1, null, null, primaryFilters, null, store.getEntity(entityId1, entityType1, - EnumSet.of(Field.PRIMARY_FILTERS))); + EnumSet.of(Field.PRIMARY_FILTERS)), namespaceId1); verifyEntityInfo(entityId1, entityType1, null, null, null, otherInfo, - store.getEntity(entityId1, entityType1, EnumSet.of(Field.OTHER_INFO))); + store.getEntity(entityId1, entityType1, EnumSet.of(Field.OTHER_INFO)), + namespaceId1); verifyEntityInfo(entityId2, entityType2, null, relEntityMap, null, null, store.getEntity(entityId2, entityType2, - EnumSet.of(Field.RELATED_ENTITIES))); + EnumSet.of(Field.RELATED_ENTITIES)), namespaceId1); + + verifyEntityInfo(entityId6, entityType1, EMPTY_EVENTS, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, store.getEntity(entityId6, entityType1, + EnumSet.allOf(Field.class)), namespaceId2); + + // entity is created, but it doesn't relate to + verifyEntityInfo(entityId7, entityType7, EMPTY_EVENTS, EMPTY_REL_ENTITIES, + EMPTY_PRIMARY_FILTERS, EMPTY_MAP, store.getEntity(entityId7, entityType7, + EnumSet.allOf(Field.class)), namespaceId2); } protected List getEntities(String entityType) @@ -438,28 +485,30 @@ public void testGetEntities() throws IOException { getEntitiesWithPrimaryFilter("type_6", 
userFilter).size()); List entities = getEntities("type_1"); - assertEquals(2, entities.size()); + assertEquals(3, entities.size()); verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(0)); + primaryFilters, otherInfo, entities.get(0), namespaceId1); verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(1)); + primaryFilters, otherInfo, entities.get(1), namespaceId1); + verifyEntityInfo(entityId6, entityType1, EMPTY_EVENTS, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, entities.get(2), namespaceId2); entities = getEntities("type_2"); assertEquals(1, entities.size()); verifyEntityInfo(entityId2, entityType2, events2, relEntityMap, - EMPTY_PRIMARY_FILTERS, EMPTY_MAP, entities.get(0)); + EMPTY_PRIMARY_FILTERS, EMPTY_MAP, entities.get(0), namespaceId1); entities = getEntities("type_1", 1l, null, null, null, EnumSet.allOf(Field.class)); assertEquals(1, entities.size()); verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(0)); + primaryFilters, otherInfo, entities.get(0), namespaceId1); entities = getEntities("type_1", 1l, 0l, null, null, EnumSet.allOf(Field.class)); assertEquals(1, entities.size()); verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(0)); + primaryFilters, otherInfo, entities.get(0), namespaceId1); entities = getEntities("type_1", null, 234l, null, null, EnumSet.allOf(Field.class)); @@ -475,35 +524,48 @@ public void testGetEntities() throws IOException { entities = getEntities("type_1", null, null, 345l, null, EnumSet.allOf(Field.class)); - assertEquals(2, entities.size()); + assertEquals(3, entities.size()); verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(0)); + primaryFilters, otherInfo, entities.get(0), namespaceId1); verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(1)); + primaryFilters, otherInfo, entities.get(1), namespaceId1); + verifyEntityInfo(entityId6, entityType1, EMPTY_EVENTS, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, entities.get(2), namespaceId2); entities = getEntities("type_1", null, null, 123l, null, EnumSet.allOf(Field.class)); - assertEquals(2, entities.size()); + assertEquals(3, entities.size()); verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(0)); + primaryFilters, otherInfo, entities.get(0), namespaceId1); verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(1)); + primaryFilters, otherInfo, entities.get(1), namespaceId1); + verifyEntityInfo(entityId6, entityType1, EMPTY_EVENTS, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, entities.get(2), namespaceId2); } public void testGetEntitiesWithFromId() throws IOException { List entities = getEntitiesFromId("type_1", entityId1); - assertEquals(2, entities.size()); + assertEquals(3, entities.size()); verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(0)); + primaryFilters, otherInfo, entities.get(0), namespaceId1); verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(1)); + primaryFilters, otherInfo, entities.get(1), namespaceId1); + verifyEntityInfo(entityId6, entityType1, EMPTY_EVENTS, 
EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, entities.get(2), namespaceId2); entities = getEntitiesFromId("type_1", entityId1b); - assertEquals(1, entities.size()); + assertEquals(2, entities.size()); verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(0)); + primaryFilters, otherInfo, entities.get(0), namespaceId1); + verifyEntityInfo(entityId6, entityType1, EMPTY_EVENTS, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, entities.get(1), namespaceId2); + + entities = getEntitiesFromId("type_1", entityId6); + assertEquals(1, entities.size()); + verifyEntityInfo(entityId6, entityType1, EMPTY_EVENTS, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, entities.get(0), namespaceId2); - entities = getEntitiesFromIdWithWindow("type_1", 0l, entityId1); + entities = getEntitiesFromIdWithWindow("type_1", 0l, entityId6); assertEquals(0, entities.size()); entities = getEntitiesFromId("type_2", "a"); @@ -512,7 +574,7 @@ public void testGetEntitiesWithFromId() throws IOException { entities = getEntitiesFromId("type_2", entityId2); assertEquals(1, entities.size()); verifyEntityInfo(entityId2, entityType2, events2, relEntityMap, - EMPTY_PRIMARY_FILTERS, EMPTY_MAP, entities.get(0)); + EMPTY_PRIMARY_FILTERS, EMPTY_MAP, entities.get(0), namespaceId1); entities = getEntitiesFromIdWithWindow("type_2", -456l, null); assertEquals(0, entities.size()); @@ -529,20 +591,30 @@ public void testGetEntitiesWithFromId() throws IOException { // same tests with primary filters entities = getEntitiesFromIdWithPrimaryFilter("type_1", userFilter, entityId1); - assertEquals(2, entities.size()); + assertEquals(3, entities.size()); verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(0)); + primaryFilters, otherInfo, entities.get(0), namespaceId1); verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(1)); + primaryFilters, otherInfo, entities.get(1), namespaceId1); + verifyEntityInfo(entityId6, entityType1, EMPTY_EVENTS, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, entities.get(2), namespaceId2); entities = getEntitiesFromIdWithPrimaryFilter("type_1", userFilter, entityId1b); - assertEquals(1, entities.size()); + assertEquals(2, entities.size()); verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(0)); + primaryFilters, otherInfo, entities.get(0), namespaceId1); + verifyEntityInfo(entityId6, entityType1, EMPTY_EVENTS, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, entities.get(1), namespaceId2); + + entities = getEntitiesFromIdWithPrimaryFilter("type_1", userFilter, + entityId6); + assertEquals(1, entities.size()); + verifyEntityInfo(entityId6, entityType1, EMPTY_EVENTS, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, entities.get(0), namespaceId2); entities = getEntitiesFromIdWithPrimaryFilterAndWindow("type_1", 0l, - entityId1, userFilter); + entityId6, userFilter); assertEquals(0, entities.size()); entities = getEntitiesFromIdWithPrimaryFilter("type_2", userFilter, "a"); @@ -555,13 +627,13 @@ public void testGetEntitiesWithFromTs() throws IOException { assertEquals(0, getEntitiesFromTsWithPrimaryFilter("type_1", userFilter, beforeTs).size()); long afterTs = System.currentTimeMillis(); - assertEquals(2, getEntitiesFromTs("type_1", afterTs).size()); + assertEquals(3, getEntitiesFromTs("type_1", afterTs).size()); assertEquals(1, getEntitiesFromTs("type_2", afterTs).size()); - 
assertEquals(2, getEntitiesFromTsWithPrimaryFilter("type_1", userFilter, + assertEquals(3, getEntitiesFromTsWithPrimaryFilter("type_1", userFilter, afterTs).size()); - assertEquals(2, getEntities("type_1").size()); + assertEquals(3, getEntities("type_1").size()); assertEquals(1, getEntities("type_2").size()); - assertEquals(2, getEntitiesWithPrimaryFilter("type_1", userFilter).size()); + assertEquals(3, getEntitiesWithPrimaryFilter("type_1", userFilter).size()); // check insert time is not overwritten long beforeTs = this.beforeTs; loadTestEntityData(); @@ -569,9 +641,9 @@ public void testGetEntitiesWithFromTs() throws IOException { assertEquals(0, getEntitiesFromTs("type_2", beforeTs).size()); assertEquals(0, getEntitiesFromTsWithPrimaryFilter("type_1", userFilter, beforeTs).size()); - assertEquals(2, getEntitiesFromTs("type_1", afterTs).size()); + assertEquals(3, getEntitiesFromTs("type_1", afterTs).size()); assertEquals(1, getEntitiesFromTs("type_2", afterTs).size()); - assertEquals(2, getEntitiesFromTsWithPrimaryFilter("type_1", userFilter, + assertEquals(3, getEntitiesFromTsWithPrimaryFilter("type_1", userFilter, afterTs).size()); } @@ -589,32 +661,40 @@ public void testGetEntitiesWithPrimaryFilters() throws IOException { List entities = getEntitiesWithPrimaryFilter("type_1", userFilter); - assertEquals(2, entities.size()); + assertEquals(3, entities.size()); verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(0)); + primaryFilters, otherInfo, entities.get(0), namespaceId1); verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(1)); + primaryFilters, otherInfo, entities.get(1), namespaceId1); + verifyEntityInfo(entityId6, entityType1, EMPTY_EVENTS, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, entities.get(2), namespaceId2); entities = getEntitiesWithPrimaryFilter("type_1", numericFilter1); - assertEquals(2, entities.size()); + assertEquals(3, entities.size()); verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(0)); + primaryFilters, otherInfo, entities.get(0), namespaceId1); verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(1)); + primaryFilters, otherInfo, entities.get(1), namespaceId1); + verifyEntityInfo(entityId6, entityType1, EMPTY_EVENTS, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, entities.get(2), namespaceId2); entities = getEntitiesWithPrimaryFilter("type_1", numericFilter2); - assertEquals(2, entities.size()); + assertEquals(3, entities.size()); verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(0)); + primaryFilters, otherInfo, entities.get(0), namespaceId1); verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(1)); + primaryFilters, otherInfo, entities.get(1), namespaceId1); + verifyEntityInfo(entityId6, entityType1, EMPTY_EVENTS, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, entities.get(2), namespaceId2); entities = getEntitiesWithPrimaryFilter("type_1", numericFilter3); - assertEquals(2, entities.size()); + assertEquals(3, entities.size()); verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(0)); + primaryFilters, otherInfo, entities.get(0), namespaceId1); verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES, - 
primaryFilters, otherInfo, entities.get(1)); + primaryFilters, otherInfo, entities.get(1), namespaceId1); + verifyEntityInfo(entityId6, entityType1, EMPTY_EVENTS, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, entities.get(2), namespaceId2); entities = getEntitiesWithPrimaryFilter("type_2", userFilter); assertEquals(0, entities.size()); @@ -622,12 +702,12 @@ public void testGetEntitiesWithPrimaryFilters() throws IOException { entities = getEntities("type_1", 1l, null, null, userFilter, null); assertEquals(1, entities.size()); verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(0)); + primaryFilters, otherInfo, entities.get(0), namespaceId1); entities = getEntities("type_1", 1l, 0l, null, userFilter, null); assertEquals(1, entities.size()); verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(0)); + primaryFilters, otherInfo, entities.get(0), namespaceId1); entities = getEntities("type_1", null, 234l, null, userFilter, null); assertEquals(0, entities.size()); @@ -636,29 +716,35 @@ public void testGetEntitiesWithPrimaryFilters() throws IOException { assertEquals(0, entities.size()); entities = getEntities("type_1", null, null, 345l, userFilter, null); - assertEquals(2, entities.size()); + assertEquals(3, entities.size()); verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(0)); + primaryFilters, otherInfo, entities.get(0), namespaceId1); verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(1)); + primaryFilters, otherInfo, entities.get(1), namespaceId1); + verifyEntityInfo(entityId6, entityType1, EMPTY_EVENTS, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, entities.get(2), namespaceId2); } public void testGetEntitiesWithSecondaryFilters() throws IOException { // test using secondary filter List entities = getEntitiesWithFilters("type_1", null, goodTestingFilters); - assertEquals(2, entities.size()); + assertEquals(3, entities.size()); verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(0)); + primaryFilters, otherInfo, entities.get(0), namespaceId1); verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(1)); + primaryFilters, otherInfo, entities.get(1), namespaceId1); + verifyEntityInfo(entityId6, entityType1, EMPTY_EVENTS, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, entities.get(2), namespaceId2); entities = getEntitiesWithFilters("type_1", userFilter, goodTestingFilters); - assertEquals(2, entities.size()); + assertEquals(3, entities.size()); verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(0)); + primaryFilters, otherInfo, entities.get(0), namespaceId1); verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES, - primaryFilters, otherInfo, entities.get(1)); + primaryFilters, otherInfo, entities.get(1), namespaceId1); + verifyEntityInfo(entityId6, entityType1, EMPTY_EVENTS, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, entities.get(2), namespaceId2); entities = getEntitiesWithFilters("type_1", null, Collections.singleton(new NameValuePair("user", "none"))); @@ -737,10 +823,10 @@ public void testGetEvents() throws IOException { protected static void verifyEntityInfo(String entityId, String entityType, List events, Map> relatedEntities, Map> 
       primaryFilters, Map otherInfo,
-      Long startTime, TimelineEntity retrievedEntityInfo) {
+      Long startTime, TimelineEntity retrievedEntityInfo, String namespaceId) {
     verifyEntityInfo(entityId, entityType, events, relatedEntities,
-        primaryFilters, otherInfo, retrievedEntityInfo);
+        primaryFilters, otherInfo, retrievedEntityInfo, namespaceId);
     assertEquals(startTime, retrievedEntityInfo.getStartTime());
   }
 
@@ -750,13 +836,14 @@ protected static void verifyEntityInfo(String entityId, String entityType,
       List events, Map> relatedEntities, Map>
       primaryFilters, Map otherInfo,
-      TimelineEntity retrievedEntityInfo) {
+      TimelineEntity retrievedEntityInfo, String namespaceId) {
     if (entityId == null) {
       assertNull(retrievedEntityInfo);
       return;
     }
     assertEquals(entityId, retrievedEntityInfo.getEntityId());
     assertEquals(entityType, retrievedEntityInfo.getEntityType());
+    assertEquals(namespaceId, retrievedEntityInfo.getNamespaceId());
     if (events == null) {
       assertNull(retrievedEntityInfo.getEvents());
     } else {
@@ -801,7 +888,7 @@ protected static TimelineEntity createEntity(String entityId, String entityType,
       Long startTime, List events, Map> relatedEntities,
       Map> primaryFilters,
-      Map otherInfo) {
+      Map otherInfo, String namespaceId) {
     TimelineEntity entity = new TimelineEntity();
     entity.setEntityId(entityId);
     entity.setEntityType(entityType);
@@ -818,6 +905,7 @@ protected static TimelineEntity createEntity(String entityId, String entityType,
     }
     entity.setPrimaryFilters(primaryFilters);
     entity.setOtherInfo(otherInfo);
+    entity.setNamespaceId(namespaceId);
     return entity;
   }
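The hunks above thread a namespace ID through the shared store-test helpers: createEntity() now stamps the namespace on every generated entity and verifyEntityInfo() asserts it on every entity read back. For illustration only (this block is not part of the patch), the sketch below shows the entity-side API those helpers rely on, using only setters that appear elsewhere in this patch; the wrapper class name and the sample ID, type, and namespace values are assumptions.

// Illustration only, not part of the patch. Class name and literal values are
// assumed; the setters mirror calls that appear in the hunks of this patch.
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;

public class NamespacedEntityExample {
  public static TimelineEntities buildTestEntities() {
    TimelineEntity entity = new TimelineEntity();
    entity.setEntityId("entity id 6");           // sample ID, assumed
    entity.setEntityType("type_1");              // sample type, assumed
    entity.setStartTime(61L);
    entity.setNamespaceId("namespace_id_2");     // namespace field added by this patch
    TimelineEntities entities = new TimelineEntities();
    entities.addEntity(entity);
    return entities;
  }
}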
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/security/TestTimelineACLsManager.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/security/TestTimelineACLsManager.java
index 2e64db7..4a4aef6 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/security/TestTimelineACLsManager.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/security/TestTimelineACLsManager.java
@@ -18,32 +18,54 @@
 package org.apache.hadoop.yarn.server.timeline.security;
 
+import java.io.IOException;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.yarn.api.records.ApplicationAccessType;
 import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
 import org.apache.hadoop.yarn.api.records.timeline.TimelineNamespace;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.exceptions.YarnException;
+import org.apache.hadoop.yarn.server.timeline.MemoryTimelineStore;
 import org.apache.hadoop.yarn.server.timeline.TimelineStore;
 import org.junit.Assert;
 import org.junit.Test;
 
 public class TestTimelineACLsManager {
 
+  private static TimelineNamespace namespace;
+
+  static {
+    namespace = new TimelineNamespace();
+    namespace.setId("namespace_id_1");
+    namespace.setOwner("owner");
+    namespace.setReaders("reader");
+    namespace.setWriters("writer");
+  }
+
   @Test
   public void testYarnACLsNotEnabledForEntity() throws Exception {
     Configuration conf = new YarnConfiguration();
     conf.setBoolean(YarnConfiguration.YARN_ACL_ENABLE, false);
     TimelineACLsManager timelineACLsManager = new TimelineACLsManager(conf);
+    timelineACLsManager.setTimelineStore(new TestTimelineStore());
     TimelineEntity entity = new TimelineEntity();
     entity.addPrimaryFilter(
         TimelineStore.SystemFilter.ENTITY_OWNER
             .toString(), "owner");
+    entity.setNamespaceId("namespace_id_1");
     Assert.assertTrue(
         "Always true when ACLs are not enabled",
         timelineACLsManager.checkAccess(
-            UserGroupInformation.createRemoteUser("user"), entity));
+            UserGroupInformation.createRemoteUser("user"),
+            ApplicationAccessType.VIEW_APP, entity));
+    Assert.assertTrue(
+        "Always true when ACLs are not enabled",
+        timelineACLsManager.checkAccess(
+            UserGroupInformation.createRemoteUser("user"),
+            ApplicationAccessType.MODIFY_APP, entity));
   }
 
   @Test
@@ -53,22 +75,53 @@ public void testYarnACLsEnabledForEntity() throws Exception {
     conf.set(YarnConfiguration.YARN_ADMIN_ACL, "admin");
     TimelineACLsManager timelineACLsManager = new TimelineACLsManager(conf);
+    timelineACLsManager.setTimelineStore(new TestTimelineStore());
     TimelineEntity entity = new TimelineEntity();
     entity.addPrimaryFilter(
         TimelineStore.SystemFilter.ENTITY_OWNER
             .toString(), "owner");
+    entity.setNamespaceId("namespace_id_1");
     Assert.assertTrue(
-        "Owner should be allowed to access",
+        "Owner should be allowed to view",
+        timelineACLsManager.checkAccess(
+            UserGroupInformation.createRemoteUser("owner"),
+            ApplicationAccessType.VIEW_APP, entity));
+    Assert.assertTrue(
+        "Reader should be allowed to view",
         timelineACLsManager.checkAccess(
-            UserGroupInformation.createRemoteUser("owner"), entity));
+            UserGroupInformation.createRemoteUser("reader"),
+            ApplicationAccessType.VIEW_APP, entity));
     Assert.assertFalse(
-        "Other shouldn't be allowed to access",
+        "Other shouldn't be allowed to view",
         timelineACLsManager.checkAccess(
-            UserGroupInformation.createRemoteUser("other"), entity));
+            UserGroupInformation.createRemoteUser("other"),
+            ApplicationAccessType.VIEW_APP, entity));
     Assert.assertTrue(
-        "Admin should be allowed to access",
+        "Admin should be allowed to view",
         timelineACLsManager.checkAccess(
-            UserGroupInformation.createRemoteUser("admin"), entity));
+            UserGroupInformation.createRemoteUser("admin"),
+            ApplicationAccessType.VIEW_APP, entity));
+
+    Assert.assertTrue(
+        "Owner should be allowed to modify",
+        timelineACLsManager.checkAccess(
+            UserGroupInformation.createRemoteUser("owner"),
+            ApplicationAccessType.MODIFY_APP, entity));
+    Assert.assertTrue(
+        "Writer should be allowed to modify",
+        timelineACLsManager.checkAccess(
+            UserGroupInformation.createRemoteUser("writer"),
+            ApplicationAccessType.MODIFY_APP, entity));
+    Assert.assertFalse(
+        "Other shouldn't be allowed to modify",
+        timelineACLsManager.checkAccess(
+            UserGroupInformation.createRemoteUser("other"),
+            ApplicationAccessType.MODIFY_APP, entity));
+    Assert.assertTrue(
+        "Admin should be allowed to modify",
+        timelineACLsManager.checkAccess(
+            UserGroupInformation.createRemoteUser("admin"),
+            ApplicationAccessType.MODIFY_APP, entity));
   }
 
   @Test
@@ -78,14 +131,16 @@ public void testCorruptedOwnerInfoForEntity() throws Exception {
     conf.set(YarnConfiguration.YARN_ADMIN_ACL, "owner");
     TimelineACLsManager timelineACLsManager = new TimelineACLsManager(conf);
+    timelineACLsManager.setTimelineStore(new TestTimelineStore());
     TimelineEntity entity = new TimelineEntity();
     try {
       timelineACLsManager.checkAccess(
-          UserGroupInformation.createRemoteUser("owner"), entity);
UserGroupInformation.createRemoteUser("owner"), + ApplicationAccessType.VIEW_APP, entity); Assert.fail("Exception is expected"); } catch (YarnException e) { Assert.assertTrue("It's not the exact expected exception", e.getMessage() - .contains("is corrupted.")); + .contains("doesn't exist.")); } } @@ -144,4 +199,15 @@ public void testCorruptedOwnerInfoForNamespace() throws Exception { } } + private static class TestTimelineStore extends MemoryTimelineStore { + @Override + public TimelineNamespace getNamespace( + String namespaceId) throws IOException { + if (namespaceId == null) { + return null; + } else { + return namespace; + } + } + } } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/webapp/TestTimelineWebServices.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/webapp/TestTimelineWebServices.java index fad78f7..ba751b1 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/webapp/TestTimelineWebServices.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/webapp/TestTimelineWebServices.java @@ -95,11 +95,14 @@ protected void configureServlets() { Configuration conf = new YarnConfiguration(); conf.setBoolean(YarnConfiguration.YARN_ACL_ENABLE, false); timelineACLsManager = new TimelineACLsManager(conf); + timelineACLsManager.setTimelineStore(store); conf.setBoolean(YarnConfiguration.YARN_ACL_ENABLE, true); conf.set(YarnConfiguration.YARN_ADMIN_ACL, "admin"); adminACLsManager = new AdminACLsManager(conf); TimelineDataManager timelineDataManager = new TimelineDataManager(store, timelineACLsManager); + timelineDataManager.init(conf); + timelineDataManager.start(); bind(TimelineDataManager.class).toInstance(timelineDataManager); serve("/*").with(GuiceContainer.class); TimelineAuthenticationFilter taFilter = @@ -182,7 +185,7 @@ public void testAbout() throws Exception { private static void verifyEntities(TimelineEntities entities) { Assert.assertNotNull(entities); - Assert.assertEquals(2, entities.getEntities().size()); + Assert.assertEquals(3, entities.getEntities().size()); TimelineEntity entity1 = entities.getEntities().get(0); Assert.assertNotNull(entity1); Assert.assertEquals("id_1", entity1.getEntityId()); @@ -199,6 +202,14 @@ private static void verifyEntities(TimelineEntities entities) { Assert.assertEquals(2, entity2.getEvents().size()); Assert.assertEquals(4, entity2.getPrimaryFilters().size()); Assert.assertEquals(4, entity2.getOtherInfo().size()); + TimelineEntity entity3 = entities.getEntities().get(2); + Assert.assertNotNull(entity2); + Assert.assertEquals("id_6", entity3.getEntityId()); + Assert.assertEquals("type_1", entity3.getEntityType()); + Assert.assertEquals(61l, entity3.getStartTime().longValue()); + Assert.assertEquals(0, entity3.getEvents().size()); + Assert.assertEquals(4, entity3.getPrimaryFilters().size()); + Assert.assertEquals(4, entity3.getOtherInfo().size()); } @Test @@ -220,7 +231,7 @@ public void testFromId() throws Exception { .accept(MediaType.APPLICATION_JSON) .get(ClientResponse.class); assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); - assertEquals(1, response.getEntity(TimelineEntities.class).getEntities() + 
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/webapp/TestTimelineWebServices.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/webapp/TestTimelineWebServices.java
index fad78f7..ba751b1 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/webapp/TestTimelineWebServices.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/webapp/TestTimelineWebServices.java
@@ -95,11 +95,14 @@ protected void configureServlets() {
       Configuration conf = new YarnConfiguration();
       conf.setBoolean(YarnConfiguration.YARN_ACL_ENABLE, false);
       timelineACLsManager = new TimelineACLsManager(conf);
+      timelineACLsManager.setTimelineStore(store);
       conf.setBoolean(YarnConfiguration.YARN_ACL_ENABLE, true);
       conf.set(YarnConfiguration.YARN_ADMIN_ACL, "admin");
       adminACLsManager = new AdminACLsManager(conf);
       TimelineDataManager timelineDataManager =
           new TimelineDataManager(store, timelineACLsManager);
+      timelineDataManager.init(conf);
+      timelineDataManager.start();
       bind(TimelineDataManager.class).toInstance(timelineDataManager);
       serve("/*").with(GuiceContainer.class);
       TimelineAuthenticationFilter taFilter =
@@ -182,7 +185,7 @@ public void testAbout() throws Exception {
   private static void verifyEntities(TimelineEntities entities) {
     Assert.assertNotNull(entities);
-    Assert.assertEquals(2, entities.getEntities().size());
+    Assert.assertEquals(3, entities.getEntities().size());
     TimelineEntity entity1 = entities.getEntities().get(0);
     Assert.assertNotNull(entity1);
     Assert.assertEquals("id_1", entity1.getEntityId());
@@ -199,6 +202,14 @@ private static void verifyEntities(TimelineEntities entities) {
     Assert.assertEquals(2, entity2.getEvents().size());
     Assert.assertEquals(4, entity2.getPrimaryFilters().size());
     Assert.assertEquals(4, entity2.getOtherInfo().size());
+    TimelineEntity entity3 = entities.getEntities().get(2);
+    Assert.assertNotNull(entity3);
+    Assert.assertEquals("id_6", entity3.getEntityId());
+    Assert.assertEquals("type_1", entity3.getEntityType());
+    Assert.assertEquals(61l, entity3.getStartTime().longValue());
+    Assert.assertEquals(0, entity3.getEvents().size());
+    Assert.assertEquals(4, entity3.getPrimaryFilters().size());
+    Assert.assertEquals(4, entity3.getOtherInfo().size());
   }
 
   @Test
@@ -220,7 +231,7 @@ public void testFromId() throws Exception {
         .accept(MediaType.APPLICATION_JSON)
         .get(ClientResponse.class);
     assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
-    assertEquals(1, response.getEntity(TimelineEntities.class).getEntities()
+    assertEquals(2, response.getEntity(TimelineEntities.class).getEntities()
         .size());
     response = r.path("ws").path("v1").path("timeline")
@@ -228,7 +239,7 @@ public void testFromId() throws Exception {
         .accept(MediaType.APPLICATION_JSON)
         .get(ClientResponse.class);
     assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
-    assertEquals(2, response.getEntity(TimelineEntities.class).getEntities()
+    assertEquals(3, response.getEntity(TimelineEntities.class).getEntities()
        .size());
   }
 
@@ -249,7 +260,7 @@ public void testFromTs() throws Exception {
         .accept(MediaType.APPLICATION_JSON)
         .get(ClientResponse.class);
     assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
-    assertEquals(2, response.getEntity(TimelineEntities.class).getEntities()
+    assertEquals(3, response.getEntity(TimelineEntities.class).getEntities()
        .size());
   }
 
@@ -439,6 +450,7 @@ public void testPostEntities() throws Exception {
     entity.setEntityId("test id 1");
     entity.setEntityType("test type 1");
     entity.setStartTime(System.currentTimeMillis());
+    entity.setNamespaceId("namespace_id_1");
     entities.addEntity(entity);
     WebResource r = resource();
     // No owner, will be rejected
@@ -482,10 +494,11 @@ public void testPostEntitiesWithYarnACLsEnabled() throws Exception {
     entity.setEntityId("test id 2");
     entity.setEntityType("test type 2");
     entity.setStartTime(System.currentTimeMillis());
+    entity.setNamespaceId("namespace_id_1");
     entities.addEntity(entity);
     WebResource r = resource();
     ClientResponse response = r.path("ws").path("v1").path("timeline")
-        .queryParam("user.name", "tester")
+        .queryParam("user.name", "writer_user_1")
        .accept(MediaType.APPLICATION_JSON)
        .type(MediaType.APPLICATION_JSON)
        .post(ClientResponse.class, entities);
@@ -497,7 +510,7 @@ public void testPostEntitiesWithYarnACLsEnabled() throws Exception {
     // override/append timeline data in the same entity with different user
     response = r.path("ws").path("v1").path("timeline")
-        .queryParam("user.name", "other")
+        .queryParam("user.name", "writer_user_2")
        .accept(MediaType.APPLICATION_JSON)
        .type(MediaType.APPLICATION_JSON)
        .post(ClientResponse.class, entities);
@@ -513,6 +526,46 @@ public void testPostEntitiesWithYarnACLsEnabled() throws Exception {
   }
 
   @Test
+  public void testPostEntitiesToDefaultNamespace() throws Exception {
+    AdminACLsManager oldAdminACLsManager =
+        timelineACLsManager.setAdminACLsManager(adminACLsManager);
+    try {
+      TimelineEntities entities = new TimelineEntities();
+      TimelineEntity entity = new TimelineEntity();
+      entity.setEntityId("test id 7");
+      entity.setEntityType("test type 7");
+      entity.setStartTime(System.currentTimeMillis());
+      entities.addEntity(entity);
+      WebResource r = resource();
+      ClientResponse response = r.path("ws").path("v1").path("timeline")
+          .queryParam("user.name", "anybody_1")
+          .accept(MediaType.APPLICATION_JSON)
+          .type(MediaType.APPLICATION_JSON)
+          .post(ClientResponse.class, entities);
+      assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+      TimelinePutResponse putResponse =
+          response.getEntity(TimelinePutResponse.class);
+      Assert.assertNotNull(putResponse);
+      Assert.assertEquals(0, putResponse.getErrors().size());
+      // verify the entity exists in the store
+      response = r.path("ws").path("v1").path("timeline")
+          .path("test type 7").path("test id 7")
+          .queryParam("user.name", "any_body_2")
+          .accept(MediaType.APPLICATION_JSON)
+          .get(ClientResponse.class);
+      assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+      entity = response.getEntity(TimelineEntity.class);
+      Assert.assertNotNull(entity);
+      Assert.assertEquals("test id 7", entity.getEntityId());
+      Assert.assertEquals("test type 7", entity.getEntityType());
+      Assert.assertEquals(TimelineDataManager.DEFAULT_NAMESPACE_ID,
+          entity.getNamespaceId());
+    } finally {
+      timelineACLsManager.setAdminACLsManager(oldAdminACLsManager);
+    }
+  }
+
+  @Test
   public void testGetEntityWithYarnACLsEnabled() throws Exception {
     AdminACLsManager oldAdminACLsManager =
         timelineACLsManager.setAdminACLsManager(adminACLsManager);
@@ -522,10 +575,11 @@ public void testGetEntityWithYarnACLsEnabled() throws Exception {
     entity.setEntityId("test id 3");
     entity.setEntityType("test type 3");
     entity.setStartTime(System.currentTimeMillis());
+    entity.setNamespaceId("namespace_id_1");
     entities.addEntity(entity);
     WebResource r = resource();
     ClientResponse response = r.path("ws").path("v1").path("timeline")
-        .queryParam("user.name", "tester")
+        .queryParam("user.name", "writer_user_1")
        .accept(MediaType.APPLICATION_JSON)
        .type(MediaType.APPLICATION_JSON)
        .post(ClientResponse.class, entities);
@@ -533,7 +587,7 @@ public void testGetEntityWithYarnACLsEnabled() throws Exception {
     // 1. No field specification
     response = r.path("ws").path("v1").path("timeline")
         .path("test type 3").path("test id 3")
-        .queryParam("user.name", "tester")
+        .queryParam("user.name", "reader_user_1")
        .accept(MediaType.APPLICATION_JSON)
        .get(ClientResponse.class);
     assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
@@ -544,7 +598,7 @@ public void testGetEntityWithYarnACLsEnabled() throws Exception {
     response = r.path("ws").path("v1").path("timeline")
         .path("test type 3").path("test id 3")
         .queryParam("fields", "relatedentities")
-        .queryParam("user.name", "tester")
+        .queryParam("user.name", "reader_user_1")
        .accept(MediaType.APPLICATION_JSON)
        .get(ClientResponse.class);
     assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
@@ -555,7 +609,7 @@ public void testGetEntityWithYarnACLsEnabled() throws Exception {
     response = r.path("ws").path("v1").path("timeline")
         .path("test type 3").path("test id 3")
         .queryParam("fields", "primaryfilters")
-        .queryParam("user.name", "tester")
+        .queryParam("user.name", "reader_user_1")
        .accept(MediaType.APPLICATION_JSON)
        .get(ClientResponse.class);
     assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
@@ -587,6 +641,7 @@ public void testGetEntitiesWithYarnACLsEnabled() {
     entity.setEntityId("test id 4");
     entity.setEntityType("test type 4");
     entity.setStartTime(System.currentTimeMillis());
+    entity.setNamespaceId("namespace_id_1");
     entities.addEntity(entity);
     WebResource r = resource();
     ClientResponse response = r.path("ws").path("v1").path("timeline")
@@ -600,16 +655,17 @@ public void testGetEntitiesWithYarnACLsEnabled() {
     entity.setEntityId("test id 5");
     entity.setEntityType("test type 4");
     entity.setStartTime(System.currentTimeMillis());
+    entity.setNamespaceId("namespace_id_1");
     entities.addEntity(entity);
     r = resource();
     response = r.path("ws").path("v1").path("timeline")
-        .queryParam("user.name", "other")
+        .queryParam("user.name", "writer_user_1")
        .accept(MediaType.APPLICATION_JSON)
        .type(MediaType.APPLICATION_JSON)
        .post(ClientResponse.class, entities);
     response = r.path("ws").path("v1").path("timeline")
-        .queryParam("user.name", "other")
+        .queryParam("user.name", "reader_user_1")
        .path("test type 4")
        .accept(MediaType.APPLICATION_JSON)
        .get(ClientResponse.class);
@@ -633,6 +689,7 @@ public void testGetEventsWithYarnACLsEnabled() {
     entity.setEntityId("test id 5");
entity.setEntityType("test type 5"); entity.setStartTime(System.currentTimeMillis()); + entity.setNamespaceId("namespace_id_1"); TimelineEvent event = new TimelineEvent(); event.setEventType("event type 1"); event.setTimestamp(System.currentTimeMillis()); @@ -650,6 +707,7 @@ public void testGetEventsWithYarnACLsEnabled() { entity.setEntityId("test id 6"); entity.setEntityType("test type 5"); entity.setStartTime(System.currentTimeMillis()); + entity.setNamespaceId("namespace_id_1"); event = new TimelineEvent(); event.setEventType("event type 2"); event.setTimestamp(System.currentTimeMillis()); @@ -657,14 +715,14 @@ public void testGetEventsWithYarnACLsEnabled() { entities.addEntity(entity); r = resource(); response = r.path("ws").path("v1").path("timeline") - .queryParam("user.name", "other") + .queryParam("user.name", "writer_user_1") .accept(MediaType.APPLICATION_JSON) .type(MediaType.APPLICATION_JSON) .post(ClientResponse.class, entities); response = r.path("ws").path("v1").path("timeline") .path("test type 5").path("events") - .queryParam("user.name", "other") + .queryParam("user.name", "reader_user_1") .queryParam("entityId", "test id 5,test id 6") .accept(MediaType.APPLICATION_JSON) .get(ClientResponse.class); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/webapp/TestTimelineWebServicesWithSSL.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/webapp/TestTimelineWebServicesWithSSL.java index 7c1fe16..463dbeb 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/webapp/TestTimelineWebServicesWithSSL.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/webapp/TestTimelineWebServicesWithSSL.java @@ -95,6 +95,7 @@ public void testPutEntities() throws Exception { TimelineEntity expectedEntity = new TimelineEntity(); expectedEntity.setEntityType("test entity type"); expectedEntity.setEntityId("test entity id"); + expectedEntity.setNamespaceId("test namespace id"); TimelineEvent event = new TimelineEvent(); event.setEventType("test event type"); event.setTimestamp(0L);