diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
index 0beec62..3c3c241 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
@@ -885,6 +885,10 @@ public static boolean isAclEnabled(Configuration conf) {
NM_PREFIX + "resourcemanager.minimum.version";
public static final String DEFAULT_NM_RESOURCEMANAGER_MINIMUM_VERSION = "NONE";
+ /** Disk Validator. */
+ public static final String DISK_VALIDATOR = NM_PREFIX + "disk-validator";
+ public static final String DEFAULT_DISK_VALIDATOR = "basic";
+
/**
* Maximum size of a container's diagnostics to keep for the container
* relaunch case.
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
index eabb679..c318ee2 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
@@ -2732,6 +2732,14 @@
+  <property>
+    <description>
+      The name of disk validator.
+    </description>
+    <name>yarn.nodemanager.disk-validator</name>
+    <value>basic</value>
+  </property>
+
   <property>
     <description>Enable the CSRF filter for the timeline service web app</description>
     <name>yarn.timeline-service.webapp.rest-csrf.enabled</name>
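
The hunks above only register the key and its "basic" default; the lookup itself appears in the Java hunks that follow. Below is a minimal sketch (not part of the patch) of that pattern, assuming the DiskValidator/DiskValidatorFactory API from hadoop-common that this change imports, and adding an explicit fallback to DEFAULT_DISK_VALIDATOR that the patch itself leaves to yarn-default.xml:

    // Sketch only: resolve the configured disk validator by name.
    Configuration conf = new YarnConfiguration();
    String validatorName = conf.get(YarnConfiguration.DISK_VALIDATOR,
        YarnConfiguration.DEFAULT_DISK_VALIDATOR);   // "basic" unless overridden
    DiskValidator validator = DiskValidatorFactory.getInstance(validatorName);
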
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DirectoryCollection.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DirectoryCollection.java
index a2bfd20..2af02021 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DirectoryCollection.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DirectoryCollection.java
@@ -34,10 +34,12 @@
import org.apache.commons.lang.RandomStringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.util.DiskValidatorFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.util.DiskChecker;
import com.google.common.annotations.VisibleForTesting;
@@ -47,6 +49,7 @@
public class DirectoryCollection {
private static final Log LOG = LogFactory.getLog(DirectoryCollection.class);
+ private final Configuration conf;
/**
* The enum defines disk failure type.
*/
@@ -163,6 +166,7 @@ public DirectoryCollection(String[] dirs,
float utilizationPercentageCutOffHigh,
float utilizationPercentageCutOffLow,
long utilizationSpaceCutOff) {
+ conf = new YarnConfiguration();
localDirs = new CopyOnWriteArrayList<>(dirs);
errorDirs = new CopyOnWriteArrayList<>();
fullDirs = new CopyOnWriteArrayList<>();
@@ -330,7 +334,8 @@ synchronized boolean checkDirs() {
String msg;
try {
File testDir = new File(dir);
- DiskChecker.checkDir(testDir);
+ DiskValidatorFactory.getInstance(
+ conf.get(YarnConfiguration.DISK_VALIDATOR)).checkStatus(testDir);
float diskUtilizationPercentageCutoff = goodDirs.contains(dir) ?
diskUtilizationPercentageCutoffHigh : diskUtilizationPercentageCutoffLow;
if (isDiskUsageOverPercentageLimit(testDir,
@@ -380,7 +385,8 @@ private void verifyDirUsingMkdir(File dir) throws IOException {
i++;
}
try {
- DiskChecker.checkDir(target);
+ DiskValidatorFactory.getInstance(
+ conf.get(YarnConfiguration.DISK_VALIDATOR)).checkStatus(target);
} finally {
FileUtils.deleteQuietly(target);
}
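
Both hunks in this file replace the static DiskChecker.checkDir(dir) call with a validator resolved from configuration on every check. For reference, a hedged sketch of the equivalent standalone call, assuming checkStatus reports failures as an IOException (DiskChecker.DiskErrorException in practice), just as checkDir did:

    // Sketch only: validate one local dir the way checkDirs() now does.
    Configuration conf = new YarnConfiguration();
    File testDir = new File("/tmp/nm-local-dir");   // hypothetical path
    try {
      DiskValidatorFactory.getInstance(
          conf.get(YarnConfiguration.DISK_VALIDATOR)).checkStatus(testDir);
    } catch (IOException e) {
      // missing, not a directory, or failing the validator's access checks;
      // the unchanged catch blocks above treat this as a failed disk.
    }

DiskValidatorFactory.getInstance is presumably cheap (a cached instance per validator name); if not, hoisting the validator into a DirectoryCollection field would avoid the per-call lookup.
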
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ContainerLocalizer.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ContainerLocalizer.java
index 57cc346..09aec20 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ContainerLocalizer.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ContainerLocalizer.java
@@ -51,7 +51,7 @@
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;
-import org.apache.hadoop.util.DiskChecker;
+import org.apache.hadoop.util.DiskValidatorFactory;
import org.apache.hadoop.util.concurrent.HadoopExecutors;
import org.apache.hadoop.yarn.YarnUncaughtExceptionHandler;
import org.apache.hadoop.yarn.api.records.LocalResource;
@@ -114,7 +114,7 @@ public ContainerLocalizer(FileContext lfs, String user, String appId,
this.localDirs = localDirs;
this.localizerId = localizerId;
this.recordFactory = recordFactory;
- this.conf = new Configuration();
+ this.conf = new YarnConfiguration();
this.appCacheDirContextName = String.format(APPCACHE_CTXT_FMT, appId);
this.pendingResources = new HashMap<LocalResource, Future<Path>>();
}
@@ -198,7 +198,9 @@ ExecutorService createDownloadThreadPool() {
Callable<Path> download(Path path, LocalResource rsrc,
UserGroupInformation ugi) throws IOException {
- DiskChecker.checkDir(new File(path.toUri().getRawPath()));
+ DiskValidatorFactory.getInstance(
+ conf.get(YarnConfiguration.DISK_VALIDATOR)).
+ checkStatus(new File(path.toUri().getRawPath()));
return new FSDownload(lfs, ugi, conf, path, rsrc);
}
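
Two details in this file are easy to miss. First, the constructor now builds a YarnConfiguration rather than a bare Configuration; YarnConfiguration registers yarn-default.xml and yarn-site.xml as default resources, so the new key reliably resolves to "basic" instead of depending on YarnConfiguration having been loaded elsewhere in the process. Second, download() now validates the destination directory before the FSDownload callable reaches the download thread pool, so an unusable directory fails fast in the caller rather than inside a worker thread. An illustrative-only sketch of the first point:

    // Sketch only: YarnConfiguration pulls in yarn-default.xml, so the
    // validator name is defined even when yarn-site.xml does not set it.
    Configuration conf = new YarnConfiguration();
    String name = conf.get(YarnConfiguration.DISK_VALIDATOR);   // "basic" unless overridden
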
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ResourceLocalizationService.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ResourceLocalizationService.java
index b2413ad..3701c37 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ResourceLocalizationService.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ResourceLocalizationService.java
@@ -71,7 +71,7 @@
import org.apache.hadoop.security.token.TokenIdentifier;
import org.apache.hadoop.service.AbstractService;
import org.apache.hadoop.service.CompositeService;
-import org.apache.hadoop.util.DiskChecker;
+import org.apache.hadoop.util.DiskValidatorFactory;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.concurrent.HadoopExecutors;
import org.apache.hadoop.util.concurrent.HadoopScheduledThreadPoolExecutor;
@@ -833,7 +833,9 @@ public void addResource(LocalizerResourceRequestEvent request) {
publicRsrc.getPathForLocalization(key, publicRootPath,
delService);
if (!publicDirDestPath.getParent().equals(publicRootPath)) {
- DiskChecker.checkDir(new File(publicDirDestPath.toUri().getPath()));
+ DiskValidatorFactory.getInstance(
+ conf.get(YarnConfiguration.DISK_VALIDATOR)).checkStatus(
+ new File(publicDirDestPath.toUri().getPath()));
}
// explicitly synchronize pending here to avoid future task
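
Taken together, these hunks make the NodeManager's disk checks pluggable: each DiskChecker.checkDir call touched above now goes through whichever validator yarn.nodemanager.disk-validator names. A test-style sketch of the expected end-to-end behavior (JUnit and the DiskValidator return type are assumptions; the "basic" validator is expected to behave like the old DiskChecker.checkDir and reject a path that is not a usable directory):

    // Sketch only (hypothetical test): the configured validator rejects a
    // regular file where a local directory is expected.
    Configuration conf = new YarnConfiguration();
    DiskValidator validator = DiskValidatorFactory.getInstance(
        conf.get(YarnConfiguration.DISK_VALIDATOR));           // "basic" by default
    File notADir = File.createTempFile("nm-disk-validator", ".tmp");
    try {
      validator.checkStatus(notADir);
      Assert.fail("expected a disk error for a non-directory path");
    } catch (IOException expected) {
      // DiskChecker.DiskErrorException in practice, matching the old behavior
    } finally {
      notADir.delete();
    }
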