diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/filecontroller/LogAggregationFileController.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/filecontroller/LogAggregationFileController.java
index e37308de575..089f62cad44 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/filecontroller/LogAggregationFileController.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/filecontroller/LogAggregationFileController.java
@@ -50,9 +50,9 @@
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.NodeId;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.apache.hadoop.yarn.exceptions.YarnException;
 import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
 import org.apache.hadoop.yarn.logaggregation.LogAggregationUtils;
+import org.apache.hadoop.yarn.server.security.ApplicationACLsManager;
 import org.apache.hadoop.yarn.webapp.View.ViewContext;
 import org.apache.hadoop.yarn.webapp.view.HtmlBlock.Block;
 import org.slf4j.Logger;
@@ -109,10 +109,16 @@
   protected int retentionSize;
   protected String fileControllerName;
+  protected ApplicationACLsManager applicationACLsManager;
 
   protected boolean fsSupportsChmod = true;
 
   public LogAggregationFileController() {}
 
+  @VisibleForTesting
+  public LogAggregationFileController(ApplicationACLsManager acLsManager) {
+    this.applicationACLsManager = acLsManager;
+  }
+
   /**
    * Initialize the log file controller.
    * @param conf the Configuration
@@ -132,6 +138,10 @@ public void initialize(Configuration conf, String controllerName) {
       this.retentionSize = configuredRetentionSize;
     }
     this.fileControllerName = controllerName;
+    if (applicationACLsManager == null) {
+      applicationACLsManager = new ApplicationACLsManager(conf);
+    }
+
     initInternal(conf);
   }
 
@@ -248,6 +258,30 @@ public abstract String getApplicationOwner(Path aggregatedLogPath,
   public abstract Map<ApplicationAccessType, String> getApplicationAcls(
       Path aggregatedLogPath, ApplicationId appId) throws IOException;
 
+  /**
+   * Validate the caller's access against the stored application ACLs.
+   *
+   * @param conf the Configuration
+   * @param ugi the UserGroupInformation of the caller
+   * @param appId the ApplicationId
+   * @param logPath the aggregated log path
+   * @param expectedAccessType the access type to check, e.g. VIEW_APP
+   * @return true if the ACL check passes
+   * @throws IOException if the ACLs or the application owner cannot be read
+   */
+  protected boolean checkAcls(Configuration conf,
+      UserGroupInformation ugi, ApplicationId appId,
+      Path logPath,
+      ApplicationAccessType expectedAccessType) throws IOException {
+    applicationACLsManager.addApplication(appId,
+        getApplicationAcls(logPath, appId));
+    if (ugi != null && !applicationACLsManager.checkAccess(ugi,
+        expectedAccessType, getApplicationOwner(logPath, appId), appId)) {
+      return false;
+    }
+    return true;
+  }
+
   /**
    * Verify and create the remote log directory.
   */
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/filecontroller/ifile/LogAggregationIndexedFileController.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/filecontroller/ifile/LogAggregationIndexedFileController.java
index 78b0c134976..6321994648a 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/filecontroller/ifile/LogAggregationIndexedFileController.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/filecontroller/ifile/LogAggregationIndexedFileController.java
@@ -65,6 +65,7 @@
 import org.apache.hadoop.io.file.tfile.Compression;
 import org.apache.hadoop.io.file.tfile.SimpleBufferedOutputStream;
 import org.apache.hadoop.io.file.tfile.Compression.Algorithm;
+import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.yarn.api.records.ApplicationAccessType;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
@@ -79,6 +80,8 @@
 import org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogValue;
 import org.apache.hadoop.yarn.logaggregation.filecontroller.LogAggregationFileController;
 import org.apache.hadoop.yarn.logaggregation.filecontroller.LogAggregationFileControllerContext;
+
+import org.apache.hadoop.yarn.server.security.ApplicationACLsManager;
 import org.apache.hadoop.yarn.util.Clock;
 import org.apache.hadoop.yarn.util.SystemClock;
 import org.apache.hadoop.yarn.util.Times;
@@ -133,6 +136,11 @@
 
   public LogAggregationIndexedFileController() {}
 
+  @VisibleForTesting
+  public LogAggregationIndexedFileController(ApplicationACLsManager acLsManager) {
+    super(acLsManager);
+  }
+
   @Override
   public void initInternal(Configuration conf) {
     // Currently, we need the underlying File System to support append
@@ -520,8 +528,8 @@ public boolean readAggregatedLogs(ContainerLogsRequest logRequest,
       }
       IndexedLogsMeta indexedLogsMeta = null;
       try {
-        indexedLogsMeta = loadIndexedLogsMeta(thisNodeFile.getPath(),
-            endIndex, appId);
+        indexedLogsMeta = loadIndexedLogsMeta(thisNodeFile.getPath(), endIndex,
+            appId);
       } catch (Exception ex) {
         // DO NOTHING
         LOG.warn("Can not load log meta from the log file:"
@@ -531,6 +539,17 @@ public boolean readAggregatedLogs(ContainerLogsRequest logRequest,
       if (indexedLogsMeta == null) {
         continue;
      }
+
+      // ACL check
+      UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
+      if (!checkAcls(conf, ugi, appId, thisNodeFile.getPath(),
+          ApplicationAccessType.VIEW_APP)) {
+        LOG.error("User [" + ugi.getShortUserName()
+            + "] is not authorized to view the logs for " + nodeName);
+        throw new AccessControlException("User [" + ugi.getShortUserName()
+            + "] is not authorized to view the logs for " + nodeName);
+      }
+
       String compressAlgo = indexedLogsMeta.getCompressName();
       List<IndexedContainerLogsMeta> candidates = new ArrayList<>();
       for (IndexedPerAggregationLogMeta logMeta
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/filecontroller/tfile/LogAggregationTFileController.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/filecontroller/tfile/LogAggregationTFileController.java
index b3103d29add..9364e900d6f 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/filecontroller/tfile/LogAggregationTFileController.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/filecontroller/tfile/LogAggregationTFileController.java
@@ -39,6 +39,8 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.RemoteIterator;
 import org.apache.hadoop.hdfs.protocol.DSQuotaExceededException;
+import org.apache.hadoop.security.AccessControlException;
+import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.yarn.api.records.ApplicationAccessType;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
@@ -55,6 +57,7 @@
 import org.apache.hadoop.yarn.logaggregation.ContainerLogsRequest;
 import org.apache.hadoop.yarn.logaggregation.LogAggregationUtils;
 import org.apache.hadoop.yarn.logaggregation.LogToolUtils;
+import org.apache.hadoop.yarn.server.security.ApplicationACLsManager;
 import org.apache.hadoop.yarn.util.Times;
 import org.apache.hadoop.yarn.webapp.View.ViewContext;
 import org.apache.hadoop.yarn.webapp.view.HtmlBlock.Block;
@@ -190,6 +193,7 @@ public boolean readAggregatedLogs(ContainerLogsRequest logRequest,
           nodeFiles = HarFs.get(p.toUri(), conf).listStatusIterator(p);
           continue;
         }
+
         if ((nodeId == null || nodeName.contains(LogAggregationUtils
             .getNodeString(nodeId))) && !nodeName.endsWith(
                 LogAggregationUtils.TMP_FILE_SUFFIX)) {
@@ -197,6 +201,17 @@ public boolean readAggregatedLogs(ContainerLogsRequest logRequest,
           try {
             reader = new AggregatedLogFormat.LogReader(conf,
                 thisNodeFile.getPath());
+
+            UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
+
+            if (!checkAcls(conf, ugi, appId, thisNodeFile.getPath(),
+                ApplicationAccessType.VIEW_APP)) {
+              LOG.error("User [" + ugi.getShortUserName()
+                  + "] is not authorized to view the logs for " + nodeName);
+              throw new AccessControlException("User [" + ugi.getShortUserName()
+                  + "] is not authorized to view the logs for " + nodeName);
+            }
+
             DataInputStream valueStream;
             LogKey key = new LogKey();
             valueStream = reader.next(key);
@@ -213,10 +228,10 @@ public boolean readAggregatedLogs(ContainerLogsRequest logRequest,
                String fileType = valueStream.readUTF();
                String fileLengthStr = valueStream.readUTF();
                long fileLength = Long.parseLong(fileLengthStr);
-               if (logTypes == null || logTypes.isEmpty() ||
-                   logTypes.contains(fileType)) {
-                 LogToolUtils.outputContainerLog(key.toString(),
-                     nodeName, fileType, fileLength, size,
+               if (logTypes == null || logTypes.isEmpty() || logTypes
+                   .contains(fileType)) {
+                 LogToolUtils.outputContainerLog(key.toString(), nodeName,
+                     fileType, fileLength, size,
                      Times.format(thisNodeFile.getModificationTime()),
                      valueStream, os, buf,
                      ContainerLogAggregationType.AGGREGATED);
@@ -224,7 +239,7 @@ public boolean readAggregatedLogs(ContainerLogsRequest logRequest,
                         Charset.forName("UTF-8"));
                 os.write(b, 0, b.length);
                 findLogs = true;
-              } else {
+              } else{
                 long totalSkipped = 0;
                 long currSkipped = 0;
                 while (currSkipped != -1 && totalSkipped < fileLength) {
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/security/ApplicationACLsManager.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/security/ApplicationACLsManager.java
index 97b41633872..0af6027ee0d 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/security/ApplicationACLsManager.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/security/ApplicationACLsManager.java
@@ -60,6 +60,12 @@ public ApplicationACLsManager(Configuration conf) {
     this.adminAclsManager = new AdminACLsManager(this.conf);
   }
 
+  @VisibleForTesting
+  public ApplicationACLsManager(Configuration conf, AdminACLsManager adminAclsMgr) {
+    this.conf = conf;
+    this.adminAclsManager = adminAclsMgr;
+  }
+
   public boolean areACLsEnabled() {
     return adminAclsManager.areACLsEnabled();
   }
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/filecontroller/ifile/TestLogAggregationIndexFileController.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/filecontroller/ifile/TestLogAggregationIndexFileController.java
index 79226797578..5bc541f9580 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/filecontroller/ifile/TestLogAggregationIndexFileController.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/filecontroller/ifile/TestLogAggregationIndexFileController.java
@@ -20,6 +20,7 @@
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.assertNotNull;
+import static org.mockito.Matchers.any;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
 
 import java.io.ByteArrayOutputStream;
@@ -30,6 +31,7 @@
 import java.io.Writer;
 import java.net.URL;
 import java.nio.charset.Charset;
+import java.security.PrivilegedAction;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -44,6 +46,7 @@
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.yarn.api.records.ApplicationAccessType;
 import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
@@ -60,12 +63,16 @@
 import org.apache.hadoop.yarn.logaggregation.filecontroller.LogAggregationFileController;
 import org.apache.hadoop.yarn.logaggregation.filecontroller.LogAggregationFileControllerContext;
 import org.apache.hadoop.yarn.logaggregation.filecontroller.LogAggregationFileControllerFactory;
+
+import org.apache.hadoop.yarn.security.AdminACLsManager;
+import org.apache.hadoop.yarn.server.security.ApplicationACLsManager;
 import org.apache.hadoop.yarn.util.Clock;
 import org.apache.hadoop.yarn.util.ControlledClock;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
+import org.mockito.Mockito;
 
 /**
  * Function test for {@link LogAggregationIndexFileController}.
@@ -78,8 +85,10 @@
   private final String remoteLogDir = "target/remote-app";
   private static final FsPermission LOG_FILE_UMASK = FsPermission
       .createImmutable((short) (0777));
-  private static final UserGroupInformation USER_UGI = UserGroupInformation
+  private static final UserGroupInformation USER1_UGI = UserGroupInformation
       .createRemoteUser("testUser");
+  private static final UserGroupInformation USER2_UGI =
+      UserGroupInformation.createRemoteUser("otherUser");
   private FileSystem fs;
   private Configuration conf;
   private ApplicationId appId;
@@ -175,18 +184,12 @@ public boolean isRollover(final FileContext fc,
 
     fileFormat.initialize(conf, "Indexed");
 
     Map<ApplicationAccessType, String> appAcls = new HashMap<>();
-    Path appDir = fileFormat.getRemoteAppLogDir(appId,
-        USER_UGI.getShortUserName());
-    if (fs.exists(appDir)) {
-      fs.delete(appDir, true);
-    }
-    assertTrue(fs.mkdirs(appDir));
     Path logPath = fileFormat.getRemoteNodeLogFileForApp(
-        appId, USER_UGI.getShortUserName(), nodeId);
+        appId, USER1_UGI.getShortUserName(), nodeId);
     LogAggregationFileControllerContext context =
         new LogAggregationFileControllerContext(
-            logPath, logPath, true, 1000, appId, appAcls, nodeId, USER_UGI);
+            logPath, logPath, true, 1000, appId, appAcls, nodeId, USER1_UGI);
     // initialize the writer
     fileFormat.initializeWriter(context);
 
@@ -197,7 +200,7 @@ public boolean isRollover(final FileContext fc,
     ContainerLogsRequest logRequest = new ContainerLogsRequest();
     logRequest.setAppId(appId);
     logRequest.setNodeId(nodeId.toString());
-    logRequest.setAppOwner(USER_UGI.getShortUserName());
+    logRequest.setAppOwner(USER1_UGI.getShortUserName());
     logRequest.setContainerId(containerId.toString());
     logRequest.setBytes(Long.MAX_VALUE);
     List<ContainerLogMeta> meta = fileFormat.readAggregatedLogsMeta(
@@ -231,7 +234,7 @@ public boolean isRollover(final FileContext fc,
     LogAggregationFileControllerFactory factory =
         new LogAggregationFileControllerFactory(factoryConf);
     LogAggregationFileController fileController = factory
-        .getFileControllerForRead(appId, USER_UGI.getShortUserName());
+        .getFileControllerForRead(appId, USER1_UGI.getShortUserName());
     Assert.assertTrue(fileController instanceof
         LogAggregationIndexedFileController);
     foundLogs = fileController.readAggregatedLogs(logRequest, System.out);
@@ -244,7 +247,7 @@ public boolean isRollover(final FileContext fc,
 
     // create a checksum file
     Path checksumFile = new Path(fileFormat.getRemoteAppLogDir(
-        appId, USER_UGI.getShortUserName()),
+        appId, USER1_UGI.getShortUserName()),
         LogAggregationUtils.getNodeString(nodeId)
             + LogAggregationIndexedFileController.CHECK_SUM_FILE_SUFFIX);
     FSDataOutputStream fInput = null;
@@ -366,6 +369,8 @@ public boolean isRollover(final FileContext fc,
     sysOutStream.reset();
   }
 
+
+
   @Test(timeout = 15000)
   public void testFetchApplictionLogsHar() throws Exception {
     List<String> newLogTypes = new ArrayList<>();
@@ -378,7 +383,7 @@ public void testFetchApplictionLogsHar() throws Exception {
         .getResource("application_123456_0001.har");
     assertNotNull(harUrl);
 
-    Path path = new Path(remoteLogDir + "/" + USER_UGI.getShortUserName()
+    Path path = new Path(remoteLogDir + "/" + USER1_UGI.getShortUserName()
         + "/logs/application_123456_0001");
     if (fs.exists(path)) {
       fs.delete(path, true);
@@ -393,7 +398,7 @@ public void testFetchApplictionLogsHar() throws Exception {
     ContainerLogsRequest logRequest = new ContainerLogsRequest();
     logRequest.setAppId(appId);
     logRequest.setNodeId(nodeId.toString());
-    logRequest.setAppOwner(USER_UGI.getShortUserName());
+    logRequest.setAppOwner(USER1_UGI.getShortUserName());
     logRequest.setContainerId(containerId.toString());
     logRequest.setBytes(Long.MAX_VALUE);
     List<ContainerLogMeta> meta = fileFormat.readAggregatedLogsMeta(
@@ -418,6 +423,118 @@ public void testFetchApplictionLogsHar() throws Exception {
     sysOutStream.reset();
   }
 
+  @Test(timeout = 15000)
+  public void testLogAggregationIndexFileFormatWithAcls() throws Exception {
+    Configuration factoryConf = new Configuration(conf);
+    factoryConf.set("yarn.log-aggregation.file-formats", "Indexed");
+    factoryConf.set("yarn.log-aggregation.file-controller.Indexed.class",
+        "org.apache.hadoop.yarn.logaggregation.filecontroller.ifile"
+            + ".LogAggregationIndexedFileController");
+    factoryConf.setBoolean(YarnConfiguration.YARN_ACL_ENABLE, true);
+    factoryConf.set(YarnConfiguration.YARN_ADMIN_ACL, "yarn");
+
+    Path path = new Path(remoteLogDir + "/" + USER1_UGI.getShortUserName()
+        + "/logs/application_123456_0001");
+    if (fs.exists(path)) {
+      fs.delete(path, true);
+    }
+    assertTrue(fs.mkdirs(path));
+
+    AdminACLsManager mockAdminAclsMgr = Mockito.mock(AdminACLsManager.class);
+    when(mockAdminAclsMgr.isAdmin(any())).thenReturn(false);
+    when(mockAdminAclsMgr.areACLsEnabled()).thenReturn(true);
+    ApplicationACLsManager applicationACLsManager =
+        new ApplicationACLsManager(factoryConf, mockAdminAclsMgr);
+
+    final ControlledClock clock = new ControlledClock();
+    clock.setTime(System.currentTimeMillis());
+    LogAggregationIndexedFileController fileFormat =
+        new LogAggregationIndexedFileController(applicationACLsManager) {
+      private int rollOverCheck = 0;
+      @Override
+      public Clock getSystemClock() {
+        return clock;
+      }
+
+      @Override
+      public boolean isRollover(final FileContext fc,
+          final Path candidate) throws IOException {
+        rollOverCheck++;
+        if (rollOverCheck >= 3) {
+          return true;
+        }
+        return false;
+      }
+    };
+
+    fileFormat.initialize(conf, "Indexed");
+    Path logPath = fileFormat.getRemoteNodeLogFileForApp(
+        appId, USER1_UGI.getShortUserName(), nodeId);
+
+    Map<ApplicationAccessType, String> appAcls = new HashMap<>();
+    LogAggregationFileControllerContext context =
+        new LogAggregationFileControllerContext(
+            logPath, logPath, true, 1000, appId, appAcls, nodeId, USER1_UGI);
+    // initialize the writer
+    fileFormat.initializeWriter(context);
+
+    Set<File> files = new HashSet<>();
+    LogKey key1 = new LogKey(containerId.toString());
+
+    Path appLogsDir = new Path(rootLocalLogDirPath, appId.toString());
+    if (fs.exists(appLogsDir)) {
+      fs.delete(appLogsDir, true);
+    }
+    assertTrue(fs.mkdirs(appLogsDir));
+
+    List<String> logTypes = new ArrayList<>();
+    logTypes.add("syslog");
+    logTypes.add("stdout");
+    logTypes.add("stderr");
+    for (String logType : logTypes) {
+      File file = createAndWriteLocalLogFile(containerId, appLogsDir,
+          logType);
+      files.add(file);
+    }
+    LogValue value = mock(LogValue.class);
+    when(value.getPendingLogFilesToUploadForThisContainer()).thenReturn(files);
+
+    fileFormat.write(key1, value);
+    fileFormat.postWrite(context);
+    fileFormat.closeWriter();
+
+    FileStatus[] status = fs.listStatus(logPath.getParent());
+    for (FileStatus fileStatus : status) {
+      fs.setPermission(fileStatus.getPath(),
+          FsPermission.createImmutable((short) 0777));
+    }
+
+    ContainerLogsRequest logRequest = new ContainerLogsRequest();
+    logRequest.setAppId(appId);
+    logRequest.setNodeId(nodeId.toString());
+    logRequest.setAppOwner(USER1_UGI.getShortUserName());
+    logRequest.setContainerId(containerId.toString());
+    logRequest.setBytes(Long.MAX_VALUE);
+
+    // Reading the aggregated logs as a different, non-admin user must be
+    // rejected by the ACL check.
+    USER2_UGI.doAs(new PrivilegedAction<Boolean>() {
+      @Override
+      public Boolean run() {
+        try {
+          fileFormat.readAggregatedLogs(logRequest, System.out);
+          Assert.fail("User [" + USER2_UGI.getShortUserName()
+              + "] should not be able to read the aggregated logs");
+        } catch (IOException e) {
+          // Expected: AccessControlException is a subclass of IOException.
+          Assert.assertTrue(e instanceof AccessControlException);
+        }
+        return false;
+      }
+    });
+  }
+
   private File createAndWriteLocalLogFile(ContainerId containerId,
       Path localLogDir, String logType) throws IOException {
     File file = new File(localLogDir.toString(), logType);