diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
index 3bb73f5..6b84b86 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
@@ -923,6 +923,10 @@ public static boolean isAclEnabled(Configuration conf) {
       NM_PREFIX + "resourcemanager.minimum.version";
   public static final String DEFAULT_NM_RESOURCEMANAGER_MINIMUM_VERSION = "NONE";
 
+  /** Disk Validator. */
+  public static final String DISK_VALIDATOR = NM_PREFIX + "disk-validator";
+  public static final String DEFAULT_DISK_VALIDATOR = "basic";
+
   /**
    * Maximum size of contain's diagnostics to keep for relaunching container
    * case.
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
index ed220c0..4b0054e 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
@@ -2824,6 +2824,14 @@
   </property>
 
   <property>
+    <description>The name of disk validator.
+    </description>
+    <name>yarn.nodemanager.disk-validator</name>
+    <value>basic</value>
+  </property>
+
+  <property>
+    <description>
       Enable the CSRF filter for the timeline service web app
     </description>
     <name>yarn.timeline-service.webapp.rest-csrf.enabled</name>
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DirectoryCollection.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DirectoryCollection.java
index 417b207..72c32e8 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DirectoryCollection.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DirectoryCollection.java
@@ -42,7 +42,11 @@
 import org.apache.hadoop.fs.FileContext;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.util.DiskChecker;
+import org.apache.hadoop.util.DiskValidator;
+import org.apache.hadoop.util.DiskValidatorFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
 
 import com.google.common.annotations.VisibleForTesting;
 
@@ -52,6 +56,8 @@ public class DirectoryCollection {
 
   private static final Log LOG = LogFactory.getLog(DirectoryCollection.class);
 
+  private final Configuration conf;
+  private final DiskValidator diskValidator;
   /**
    * The enum defines disk failure type.
    */
@@ -172,6 +178,16 @@ public DirectoryCollection(String[] dirs,
       float utilizationPercentageCutOffHigh,
       float utilizationPercentageCutOffLow,
       long utilizationSpaceCutOff) {
+    conf = new YarnConfiguration();
+    try {
+      diskValidator = DiskValidatorFactory.getInstance(
+          conf.get(YarnConfiguration.DISK_VALIDATOR));
+      LOG.info("Disk Validator: " + YarnConfiguration.DISK_VALIDATOR
+          + " is loaded.");
+    } catch (Exception e) {
+      throw new YarnRuntimeException(e);
+    }
+
     localDirs = new CopyOnWriteArrayList<>(dirs);
     errorDirs = new CopyOnWriteArrayList<>();
     fullDirs = new CopyOnWriteArrayList<>();
@@ -395,7 +411,7 @@ boolean checkDirs() {
       String msg;
       try {
         File testDir = new File(dir);
-        DiskChecker.checkDir(testDir);
+        diskValidator.checkStatus(testDir);
         float diskUtilizationPercentageCutoff = goodDirs.contains(dir) ?
             diskUtilizationPercentageCutoffHigh : diskUtilizationPercentageCutoffLow;
         if (isDiskUsageOverPercentageLimit(testDir,
@@ -445,7 +461,7 @@ private void verifyDirUsingMkdir(File dir) throws IOException {
       i++;
     }
     try {
-      DiskChecker.checkDir(target);
+      diskValidator.checkStatus(target);
     } finally {
       FileUtils.deleteQuietly(target);
     }
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/LinuxContainerExecutor.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/LinuxContainerExecutor.java
index 04e38fa..8f5ee6b 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/LinuxContainerExecutor.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/LinuxContainerExecutor.java
@@ -97,6 +97,7 @@ public void setConf(Configuration conf) {
 
     resourcesHandler = getResourcesHandler(conf);
 
+    containerSchedPriorityIsSet = false;
     if (conf.get(YarnConfiguration.NM_CONTAINER_EXECUTOR_SCHED_PRIORITY)
         != null) {
       containerSchedPriorityIsSet = true;
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ContainerLocalizer.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ContainerLocalizer.java
index 65fd9d8..04be631 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ContainerLocalizer.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ContainerLocalizer.java
@@ -51,7 +51,8 @@
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
-import org.apache.hadoop.util.DiskChecker;
+import org.apache.hadoop.util.DiskValidator;
+import org.apache.hadoop.util.DiskValidatorFactory;
 import org.apache.hadoop.util.concurrent.HadoopExecutors;
 import org.apache.hadoop.yarn.YarnUncaughtExceptionHandler;
 import org.apache.hadoop.yarn.api.records.LocalResource;
@@ -69,7 +70,6 @@
 import org.apache.hadoop.yarn.server.nodemanager.api.protocolrecords.LocalizerStatus;
 import org.apache.hadoop.yarn.server.nodemanager.api.protocolrecords.ResourceStatusType;
 import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.security.LocalizerTokenIdentifier;
-import org.apache.hadoop.yarn.util.ConverterUtils;
 import org.apache.hadoop.yarn.util.FSDownload;
 
 import com.google.common.annotations.VisibleForTesting;
@@ -99,6 +99,7 @@
   private final RecordFactory recordFactory;
   private final Map<LocalResource, Future<Path>> pendingResources;
   private final String appCacheDirContextName;
+  private final DiskValidator diskValidator;
 
   public ContainerLocalizer(FileContext lfs, String user, String appId,
       String localizerId, List<Path> localDirs,
@@ -115,7 +116,11 @@ public ContainerLocalizer(FileContext lfs, String user, String appId,
     this.localDirs = localDirs;
     this.localizerId = localizerId;
     this.recordFactory = recordFactory;
-    this.conf = new Configuration();
+    this.conf = new YarnConfiguration();
+    this.diskValidator = DiskValidatorFactory.getInstance(
+        conf.get(YarnConfiguration.DISK_VALIDATOR));
+    LOG.info("Disk Validator: " + YarnConfiguration.DISK_VALIDATOR
+        + " is loaded.");
     this.appCacheDirContextName = String.format(APPCACHE_CTXT_FMT, appId);
     this.pendingResources = new HashMap<LocalResource, Future<Path>>();
   }
@@ -199,7 +204,7 @@ ExecutorService createDownloadThreadPool() {
 
   Callable<Path> download(Path path, LocalResource rsrc,
       UserGroupInformation ugi) throws IOException {
-    DiskChecker.checkDir(new File(path.toUri().getRawPath()));
+    diskValidator.checkStatus(new File(path.toUri().getRawPath()));
     return new FSDownload(lfs, ugi, conf, path, rsrc);
   }
 
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ResourceLocalizationService.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ResourceLocalizationService.java
index 409cc29..d5e60dc 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ResourceLocalizationService.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ResourceLocalizationService.java
@@ -72,6 +72,8 @@
 import org.apache.hadoop.service.AbstractService;
 import org.apache.hadoop.service.CompositeService;
 import org.apache.hadoop.util.DiskChecker;
+import org.apache.hadoop.util.DiskValidator;
+import org.apache.hadoop.util.DiskValidatorFactory;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.concurrent.HadoopExecutors;
 import org.apache.hadoop.util.concurrent.HadoopScheduledThreadPoolExecutor;
@@ -168,6 +170,7 @@
   private DirsChangeListener localDirsChangeListener;
   private DirsChangeListener logDirsChangeListener;
   private Context nmContext;
+  private DiskValidator diskValidator;
 
   /**
    * Map of LocalResourceTrackers keyed by username, for private
@@ -247,6 +250,10 @@ public void serviceInit(Configuration conf) throws Exception {
           "Failed to initialize LocalizationService", e);
     }
+    diskValidator = DiskValidatorFactory.getInstance(
+        conf.get(YarnConfiguration.DISK_VALIDATOR));
+    LOG.info("Disk Validator: " + YarnConfiguration.DISK_VALIDATOR
+        + " is loaded.");
     cacheTargetSize =
         conf.getLong(YarnConfiguration.NM_LOCALIZER_CACHE_TARGET_SIZE_MB,
             YarnConfiguration.DEFAULT_NM_LOCALIZER_CACHE_TARGET_SIZE_MB) << 20;
     cacheCleanupPeriod =
@@ -835,7 +842,13 @@ public void addResource(LocalizerResourceRequestEvent request) {
               publicRsrc.getPathForLocalization(key, publicRootPath,
                   delService);
           if (!publicDirDestPath.getParent().equals(publicRootPath)) {
-            DiskChecker.checkDir(new File(publicDirDestPath.toUri().getPath()));
+            if (diskValidator != null) {
+              diskValidator.checkStatus(
+                  new File(publicDirDestPath.toUri().getPath()));
+            } else {
+              throw new DiskChecker.DiskErrorException(
+                  "Disk Validator is null!");
+            }
           }
 
           // explicitly synchronize pending here to avoid future task
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestLinuxContainerExecutorWithMocks.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestLinuxContainerExecutorWithMocks.java
index aae0037..d460e62 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestLinuxContainerExecutorWithMocks.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestLinuxContainerExecutorWithMocks.java
@@ -128,7 +128,7 @@ public void setup() throws IOException, ContainerExecutionException {
     tmpMockExecutor = System.getProperty("test.build.data") +
         "/tmp-mock-container-executor";
 
-    Configuration conf = new Configuration();
+    Configuration conf = new YarnConfiguration();
     LinuxContainerRuntime linuxContainerRuntime;
 
     setupMockExecutor(MOCK_EXECUTOR, conf);
@@ -220,7 +220,10 @@ public void testContainerLaunchWithPriority() throws IOException {
   public void testLaunchCommandWithoutPriority() throws IOException {
     // make sure the command doesn't contain the nice -n since priority
     // not specified
-    List<String> command = new ArrayList<String>();
+    List<String> command = new ArrayList<>();
+    Configuration conf = mockExec.getConf();
+    conf.unset(YarnConfiguration.NM_CONTAINER_EXECUTOR_SCHED_PRIORITY);
+    mockExec.setConf(conf);
     mockExec.addSchedPriorityCommand(command);
     assertEquals("addSchedPriority should be empty", 0, command.size());
   }
@@ -243,7 +246,7 @@ public void testStartLocalizer() throws IOException {
         .build());
 
     List<String> result=readMockParams();
-    Assert.assertEquals(result.size(), 18);
+    Assert.assertEquals(result.size(), 19);
     Assert.assertEquals(result.get(0), YarnConfiguration.DEFAULT_NM_NONSECURE_MODE_LOCAL_USER);
     Assert.assertEquals(result.get(1), "test");
     Assert.assertEquals(result.get(2), "0" );
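
Usage note (not part of the patch above): the hunks introduce the yarn.nodemanager.disk-validator property (default "basic") and replace the hard-coded DiskChecker.checkDir() calls with a pluggable DiskValidator resolved through DiskValidatorFactory. Below is a minimal, self-contained sketch of that pattern, not code taken from the patch; it relies only on the classes imported in the hunks above, and the local-dir path is purely illustrative.

import java.io.File;
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.DiskValidator;
import org.apache.hadoop.util.DiskValidatorFactory;
import org.apache.hadoop.yarn.conf.YarnConfiguration;

/**
 * Sketch of the pattern the patch applies in DirectoryCollection,
 * ContainerLocalizer and ResourceLocalizationService: resolve the
 * configured validator once, then call checkStatus() wherever
 * DiskChecker.checkDir() used to be called.
 */
public class DiskValidatorSketch {
  public static void main(String[] args) throws IOException {
    Configuration conf = new YarnConfiguration();
    // Fall back to the "basic" validator when the key is unset;
    // yarn-default.xml ships the same default value.
    String name = conf.get(YarnConfiguration.DISK_VALIDATOR,
        YarnConfiguration.DEFAULT_DISK_VALIDATOR);
    DiskValidator validator = DiskValidatorFactory.getInstance(name);
    // checkStatus() throws DiskChecker.DiskErrorException (an IOException)
    // if the directory is missing or fails the validator's checks.
    validator.checkStatus(new File("/tmp/nm-local-dir"));
  }
}

Operators can switch implementations per node in yarn-site.xml by setting yarn.nodemanager.disk-validator to another name known to DiskValidatorFactory; hadoop-common also ships a read-write validator, expected to be registered under the name "read-write".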