diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/SecureBulkLoadClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/SecureBulkLoadClient.java
index 7b1547d..5d16bad 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/SecureBulkLoadClient.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/SecureBulkLoadClient.java
@@ -34,7 +34,6 @@ import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType;
-import org.apache.hadoop.hbase.security.SecureBulkLoadUtil;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.security.token.Token;
@@ -135,7 +134,4 @@ public class SecureBulkLoadClient {
}
}
- public Path getStagingPath(String bulkToken, byte[] family) throws IOException {
- return SecureBulkLoadUtil.getStagingPath(table.getConfiguration(), bulkToken, family);
- }
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SecureBulkLoadUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SecureBulkLoadUtil.java
deleted file mode 100644
index 5af6891..0000000
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SecureBulkLoadUtil.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/**
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.security;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.util.Bytes;
-
-@InterfaceAudience.Private
-public class SecureBulkLoadUtil {
- private final static String BULKLOAD_STAGING_DIR = "hbase.bulkload.staging.dir";
-
- /**
- * This returns the staging path for a given column family.
- * This is needed for clean recovery and called reflectively in LoadIncrementalHFiles
- */
- public static Path getStagingPath(Configuration conf, String bulkToken, byte[] family) {
- Path stageP = new Path(getBaseStagingDir(conf), bulkToken);
- return new Path(stageP, Bytes.toString(family));
- }
-
- public static Path getBaseStagingDir(Configuration conf) {
- String hbaseTmpFsDir =
- conf.get(HConstants.TEMPORARY_FS_DIRECTORY_KEY,
- HConstants.DEFAULT_TEMPORARY_HDFS_DIRECTORY);
- return new Path(conf.get(BULKLOAD_STAGING_DIR, hbaseTmpFsDir));
- }
-}
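Note: with SecureBulkLoadUtil gone, the staging path is no longer configurable via hbase.bulkload.staging.dir (which defaulted to hbase.fs.tmp.dir); it is now fixed under the HBase root directory. A minimal sketch of the new resolution as this patch applies it in SecureBulkLoadManager and HFileReplicator below (the class and method names here are illustrative, not part of the patch):

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.util.FSUtils;

public final class StagingDirSketch {
  // Resolves <hbase.rootdir>/staging, e.g. hdfs://ns/hbase/staging.
  static Path baseStagingDir(Configuration conf) throws IOException {
    return new Path(FSUtils.getRootDir(conf), HConstants.BULKLOAD_STAGING_DIR_NAME);
  }
}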
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java
index 256c374..8046ccc 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java
@@ -305,6 +305,9 @@ public final class HConstants {
/** Like the previous, but for old logs that are about to be deleted */
public static final String HREGION_OLDLOGDIR_NAME = "oldWALs";
+ /** Staging dir used by bulk load */
+ public static final String BULKLOAD_STAGING_DIR_NAME = "staging";
+
public static final String CORRUPT_DIR_NAME = "corrupt";
/** Used by HBCK to sideline backup data */
diff --git a/hbase-common/src/main/resources/hbase-default.xml b/hbase-common/src/main/resources/hbase-default.xml
index 116c7d9..30fc625 100644
--- a/hbase-common/src/main/resources/hbase-default.xml
+++ b/hbase-common/src/main/resources/hbase-default.xml
@@ -70,13 +70,6 @@ possible configurations would overwhelm and obscure the important.
- <property>
- <name>hbase.bulkload.staging.dir</name>
- <value>${hbase.fs.tmp.dir}</value>
- <description>A staging directory in default file system (HDFS)
- for bulk loading.
- </description>
- </property>
<property>
<name>hbase.cluster.distributed</name>
<value>false</value>
<description>The mode the cluster will be in. Possible values are
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java
index 4e7c8d2..3cae4d2 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java
@@ -118,7 +118,6 @@ public class HBaseCommonTestingUtility {
if (deleteOnExit()) this.dataTestDir.deleteOnExit();
createSubDir("hbase.local.dir", testPath, "hbase-local-dir");
- createSubDir("hbase.bulkload.staging.dir", testPath, "staging");
return testPath;
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java
index 0ce7411..9e7fe59 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java
@@ -36,6 +36,7 @@ import org.apache.hadoop.hbase.backup.HFileArchiver;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.fs.HFileSystem;
+import org.apache.hadoop.hbase.master.procedure.MasterProcedureConstants;
import org.apache.hadoop.hbase.mob.MobConstants;
import org.apache.hadoop.hbase.mob.MobUtils;
import org.apache.hadoop.hbase.regionserver.HRegion;
@@ -63,6 +64,10 @@ public class MasterFileSystem {
private final Path rootdir;
// hbase temp directory used for table construction and deletion
private final Path tempdir;
+ // permissions for the directories under rootDir that need protection
+ private final FsPermission rootDirPerms;
+
+ private boolean isSecurityEnabled;
private final MasterServices services;
@@ -81,6 +86,8 @@ public class MasterFileSystem {
FSUtils.setFsDefault(conf, new Path(this.fs.getUri()));
// make sure the fs has the same conf
fs.setConf(conf);
+ this.rootDirPerms = new FsPermission(conf.get("hbase.rootdir.perms", "700"));
+ this.isSecurityEnabled = "kerberos".equalsIgnoreCase(conf.get("hbase.security.authentication"));
// setup the filesystem variable
createInitialFileSystemLayout();
HFileSystem.addLocationsOrderInterceptor(conf);
@@ -99,8 +106,17 @@ public class MasterFileSystem {
// check if the root directory exists
checkRootDir(this.rootdir, conf, this.fs);
- // check if temp directory exists and clean it
+ // Check the directories under rootdir.
+ // Not all the sub-directories are checked here; some are created and verified by the components that use them.
checkTempDir(this.tempdir, conf, this.fs);
+ checkSubDir(new Path(this.rootdir, HConstants.BASE_NAMESPACE_DIR), conf, this.fs);
+ checkSubDir(new Path(this.rootdir, HConstants.HFILE_ARCHIVE_DIRECTORY), conf, this.fs);
+ checkSubDir(new Path(this.rootdir, HConstants.HREGION_LOGDIR_NAME), conf, this.fs);
+ checkSubDir(new Path(this.rootdir, HConstants.HREGION_OLDLOGDIR_NAME), conf, this.fs);
+ checkSubDir(new Path(this.rootdir, MasterProcedureConstants.MASTER_PROCEDURE_LOGDIR), conf, this.fs);
+ checkSubDir(new Path(this.rootdir, HConstants.CORRUPT_DIR_NAME), conf, this.fs);
+ checkSubDir(new Path(this.rootdir, HConstants.HBCK_SIDELINEDIR_NAME), conf, this.fs);
+ checkSubDir(new Path(this.rootdir, MobConstants.MOB_DIR_NAME), conf, this.fs);
}
public FileSystem getFileSystem() {
@@ -146,17 +162,10 @@ public class MasterFileSystem {
// If FS is in safe mode wait till out of it.
FSUtils.waitOnSafeMode(c, c.getInt(HConstants.THREAD_WAKE_FREQUENCY, 10 * 1000));
- boolean isSecurityEnabled = "kerberos".equalsIgnoreCase(c.get("hbase.security.authentication"));
- FsPermission rootDirPerms = new FsPermission(c.get("hbase.rootdir.perms", "700"));
-
// Filesystem is good. Go ahead and check for hbase.rootdir.
try {
if (!fs.exists(rd)) {
- if (isSecurityEnabled) {
- fs.mkdirs(rd, rootDirPerms);
- } else {
- fs.mkdirs(rd);
- }
+ fs.mkdirs(rd);
// DFS leaves safe mode with 0 DNs when there are 0 blocks.
// We used to handle this by checking the current DN count and waiting until
// it is nonzero. With security, the check for datanode count doesn't work --
@@ -171,16 +180,6 @@ public class MasterFileSystem {
if (!fs.isDirectory(rd)) {
throw new IllegalArgumentException(rd.toString() + " is not a directory");
}
- if (isSecurityEnabled && !rootDirPerms.equals(fs.getFileStatus(rd).getPermission())) {
- // check whether the permission match
- LOG.warn("Found rootdir permissions NOT matching expected \"hbase.rootdir.perms\" for "
- + "rootdir=" + rd.toString() + " permissions=" + fs.getFileStatus(rd).getPermission()
- + " and \"hbase.rootdir.perms\" configured as "
- + c.get("hbase.rootdir.perms", "700") + ". Automatically setting the permissions. You"
- + " can change the permissions by setting \"hbase.rootdir.perms\" in hbase-site.xml "
- + "and restarting the master");
- fs.setPermission(rd, rootDirPerms);
- }
// as above
FSUtils.checkVersion(fs, rd, true, c.getInt(HConstants.THREAD_WAKE_FREQUENCY,
10 * 1000), c.getInt(HConstants.VERSION_FILE_WRITE_ATTEMPTS,
@@ -239,8 +238,49 @@ public class MasterFileSystem {
}
// Create the temp directory
- if (!fs.mkdirs(tmpdir)) {
- throw new IOException("HBase temp directory '" + tmpdir + "' creation failure.");
+ if (isSecurityEnabled) {
+ if (!fs.mkdirs(tmpdir, rootDirPerms)) {
+ throw new IOException("HBase temp directory '" + tmpdir + "' creation failure.");
+ }
+ } else {
+ if (!fs.mkdirs(tmpdir)) {
+ throw new IOException("HBase temp directory '" + tmpdir + "' creation failure.");
+ }
+ }
+ }
+
+ /**
+ * Make sure the given directory under rootDir has the expected permissions. Create it if necessary.
+ * @param p the directory to check or create
+ * @param c the configuration providing "hbase.rootdir.perms"
+ * @param fs the filesystem containing the directory
+ * @throws IOException if the directory cannot be created
+ */
+ private void checkSubDir(final Path p, final Configuration c, final FileSystem fs)
+ throws IOException {
+
+ if (!fs.exists(p)) {
+ if (isSecurityEnabled) {
+ if (!fs.mkdirs(p, rootDirPerms)) {
+ throw new IOException("HBase directory '" + p + "' creation failure.");
+ }
+ } else {
+ if (!fs.mkdirs(p)) {
+ throw new IOException("HBase directory '" + p + "' creation failure.");
+ }
+ }
+ } else {
+ if (isSecurityEnabled && !rootDirPerms.equals(fs.getFileStatus(p).getPermission())) {
+ // check whether the permission match
+ LOG.warn("Found HBase directory permissions NOT matching expected \"hbase.rootdir.perms\" for "
+ + p.toString() + " permissions=" + fs.getFileStatus(p).getPermission()
+ + " and \"hbase.rootdir.perms\" configured as "
+ + c.get("hbase.rootdir.perms", "700") + ". Automatically setting the permissions. You"
+ + " can change the permissions by setting \"hbase.rootdir.perms\" in hbase-site.xml "
+ + "and restarting the master");
+ fs.setPermission(p, rootDirPerms);
+ }
}
}
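Note: checkSubDir generalizes the permission check that checkRootDir previously performed for the root directory alone. A standalone sketch of the pattern, under the same assumptions the patch encodes (kerberos toggles strict permissions, "700" is the hbase.rootdir.perms default; the class name is illustrative):

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;

public final class DirPermissionSketch {
  // Create the directory with strict permissions when security is enabled,
  // otherwise with filesystem defaults; repair drifted permissions in place.
  static void ensureDir(FileSystem fs, Path p, Configuration c) throws IOException {
    boolean secure = "kerberos".equalsIgnoreCase(c.get("hbase.security.authentication"));
    FsPermission perms = new FsPermission(c.get("hbase.rootdir.perms", "700"));
    if (!fs.exists(p)) {
      boolean created = secure ? fs.mkdirs(p, perms) : fs.mkdirs(p);
      if (!created) {
        throw new IOException("HBase directory '" + p + "' creation failure.");
      }
    } else if (secure && !perms.equals(fs.getFileStatus(p).getPermission())) {
      fs.setPermission(p, perms);
    }
  }
}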
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterWalManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterWalManager.java
index ab0dd4f..6941d81 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterWalManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterWalManager.java
@@ -105,11 +105,6 @@ public class MasterWalManager {
this.distributedLogReplay = this.splitLogManager.isLogReplaying();
this.oldLogDir = new Path(rootDir, HConstants.HREGION_OLDLOGDIR_NAME);
-
- // Make sure the region servers can archive their old logs
- if (!this.fs.exists(oldLogDir)) {
- this.fs.mkdirs(oldLogDir);
- }
}
public void stop() {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SecureBulkLoadManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SecureBulkLoadManager.java
index 9f53ac5..f2ec095 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SecureBulkLoadManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SecureBulkLoadManager.java
@@ -27,6 +27,7 @@ import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hbase.DoNotRetryIOException;
+import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.coprocessor.BulkLoadObserver;
@@ -38,12 +39,12 @@ import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest;
import org.apache.hadoop.hbase.regionserver.Region.BulkLoadListener;
-import org.apache.hadoop.hbase.security.SecureBulkLoadUtil;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.security.UserProvider;
import org.apache.hadoop.hbase.security.token.FsDelegationToken;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.FSHDFSUtils;
+import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.Methods;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.io.Text;
@@ -99,6 +100,8 @@ public class SecureBulkLoadManager {
private final static FsPermission PERM_ALL_ACCESS = FsPermission.valueOf("-rwxrwxrwx");
private final static FsPermission PERM_HIDDEN = FsPermission.valueOf("-rwx--x--x");
+ private final static String BULKLOAD_STAGING_DIR = "hbase.bulkload.staging.dir";
+
private SecureRandom random;
private FileSystem fs;
private Configuration conf;
@@ -115,10 +118,14 @@ public class SecureBulkLoadManager {
public void start() {
random = new SecureRandom();
- baseStagingDir = SecureBulkLoadUtil.getBaseStagingDir(conf);
this.userProvider = UserProvider.instantiate(conf);
try {
+ baseStagingDir = new Path(FSUtils.getRootDir(conf), HConstants.BULKLOAD_STAGING_DIR_NAME);
+
+ if (conf.get(BULKLOAD_STAGING_DIR) != null) {
+ LOG.warn(BULKLOAD_STAGING_DIR + " is deprecated. Bulk load staging directory is now " + baseStagingDir);
+ }
fs = FileSystem.get(conf);
fs.mkdirs(baseStagingDir, PERM_HIDDEN);
fs.setPermission(baseStagingDir, PERM_HIDDEN);
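Note: start() now also warns when the deprecated key is still set, then bootstraps the new location. The sequence it performs, sketched under the same assumptions (names outside the patch are illustrative):

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.util.FSUtils;

public final class StagingBootstrapSketch {
  private static final FsPermission PERM_HIDDEN = FsPermission.valueOf("-rwx--x--x");

  // Create <rootdir>/staging locked down to -rwx--x--x: execute-only for
  // group and others, so users can traverse into their own token-named
  // subdirectories without being able to list the staging dir itself.
  static Path bootstrap(Configuration conf) throws IOException {
    Path staging = new Path(FSUtils.getRootDir(conf), HConstants.BULKLOAD_STAGING_DIR_NAME);
    FileSystem fs = FileSystem.get(conf);
    fs.mkdirs(staging, PERM_HIDDEN);        // no-op if the directory exists
    fs.setPermission(staging, PERM_HIDDEN); // enforce perms even when pre-existing
    return staging;
  }
}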
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/HFileReplicator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/HFileReplicator.java
index 9893e7e..cc02fb6 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/HFileReplicator.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/HFileReplicator.java
@@ -51,6 +51,7 @@ import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.security.UserProvider;
import org.apache.hadoop.hbase.security.token.FsDelegationToken;
import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.Pair;
/**
@@ -82,7 +83,7 @@ public class HFileReplicator {
private UserProvider userProvider;
private Configuration conf;
private Connection connection;
- private String hbaseStagingDir;
+ private Path hbaseStagingDir;
private ThreadPoolExecutor exec;
private int maxCopyThreads;
private int copiesPerThread;
@@ -100,7 +101,7 @@ public class HFileReplicator {
userProvider = UserProvider.instantiate(conf);
fsDelegationToken = new FsDelegationToken(userProvider, "renewer");
- this.hbaseStagingDir = conf.get("hbase.bulkload.staging.dir");
+ this.hbaseStagingDir = new Path(FSUtils.getRootDir(conf), HConstants.BULKLOAD_STAGING_DIR_NAME);
this.maxCopyThreads =
this.conf.getInt(REPLICATION_BULKLOAD_COPY_MAXTHREADS_KEY,
REPLICATION_BULKLOAD_COPY_MAXTHREADS_DEFAULT);
@@ -253,7 +254,7 @@ public class HFileReplicator {
// Create staging directory for each table
Path stagingDir =
- createStagingDir(new Path(hbaseStagingDir), user, TableName.valueOf(tableName));
+ createStagingDir(hbaseStagingDir, user, TableName.valueOf(tableName));
familyHFilePathsPairsList = tableEntry.getValue();
familyHFilePathsPairsListSize = familyHFilePathsPairsList.size();
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/hbck/HFileCorruptionChecker.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/hbck/HFileCorruptionChecker.java
index cc10fad..d1d0b0f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/hbck/HFileCorruptionChecker.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/hbck/HFileCorruptionChecker.java
@@ -142,8 +142,7 @@ public class HFileCorruptionChecker {
Path tableDir = regionDir.getParent();
// build up the corrupted dirs structure
- Path corruptBaseDir = new Path(FSUtils.getRootDir(conf), conf.get(
- "hbase.hfile.quarantine.dir", HConstants.CORRUPT_DIR_NAME));
+ Path corruptBaseDir = new Path(FSUtils.getRootDir(conf), HConstants.CORRUPT_DIR_NAME);
Path corruptTableDir = new Path(corruptBaseDir, tableDir.getName());
Path corruptRegionDir = new Path(corruptTableDir, regionDir.getName());
Path corruptFamilyDir = new Path(corruptRegionDir, cfDir.getName());
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java
index 3e27834..f4844db 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java
@@ -477,8 +477,7 @@ public class WALSplitter {
final List<Path> corruptedLogs,
final List<Path> processedLogs, final Path oldLogDir,
final FileSystem fs, final Configuration conf) throws IOException {
- final Path corruptDir = new Path(FSUtils.getRootDir(conf), conf.get(
- "hbase.regionserver.hlog.splitlog.corrupt.dir", HConstants.CORRUPT_DIR_NAME));
+ final Path corruptDir = new Path(FSUtils.getRootDir(conf), HConstants.CORRUPT_DIR_NAME);
if (!fs.mkdirs(corruptDir)) {
LOG.info("Unable to mkdir " + corruptDir);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
index a6dc59f..6548db4 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
@@ -1278,7 +1278,6 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
} else {
LOG.info("The hbase.fs.tmp.dir is set to " + hbaseFsTmpDirInString);
}
- this.conf.set("hbase.bulkload.staging.dir", this.conf.get("hbase.fs.tmp.dir"));
}
/**
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java
index 9f2596c..b8c6d8f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java
@@ -47,10 +47,10 @@ import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.io.hfile.HFileScanner;
import org.apache.hadoop.hbase.regionserver.BloomType;
-import org.apache.hadoop.hbase.security.SecureBulkLoadUtil;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.testclassification.MapReduceTests;
import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.HFileTestUtil;
import org.junit.AfterClass;
import org.junit.BeforeClass;
@@ -302,7 +302,7 @@ public class TestLoadIncrementalHFiles {
}
// verify staging folder has been cleaned up
- Path stagingBasePath = SecureBulkLoadUtil.getBaseStagingDir(util.getConfiguration());
+ Path stagingBasePath = new Path(FSUtils.getRootDir(util.getConfiguration()), HConstants.BULKLOAD_STAGING_DIR_NAME);
if(fs.exists(stagingBasePath)) {
FileStatus[] files = fs.listStatus(stagingBasePath);
for(FileStatus file : files) {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/SecureBulkLoadEndpointClient.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/SecureBulkLoadEndpointClient.java
index 9ecc5d6..d195347 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/SecureBulkLoadEndpointClient.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/SecureBulkLoadEndpointClient.java
@@ -38,7 +38,6 @@ import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse;
import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos;
-import org.apache.hadoop.hbase.security.SecureBulkLoadUtil;
import org.apache.hadoop.hbase.util.ByteStringer;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.security.token.Token;
@@ -166,7 +165,4 @@ public class SecureBulkLoadEndpointClient {
}
}
- public Path getStagingPath(String bulkToken, byte[] family) throws IOException {
- return SecureBulkLoadUtil.getStagingPath(table.getConfiguration(), bulkToken, family);
- }
}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplit.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplit.java
index 467fcb4..23b4521 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplit.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplit.java
@@ -1078,8 +1078,7 @@ public class TestWALSplit {
useDifferentDFSClient();
WALSplitter.split(HBASEDIR, WALDIR, OLDLOGDIR, fs, conf, wals);
- final Path corruptDir = new Path(FSUtils.getRootDir(conf), conf.get(
- "hbase.regionserver.hlog.splitlog.corrupt.dir", HConstants.CORRUPT_DIR_NAME));
+ final Path corruptDir = new Path(FSUtils.getRootDir(conf), HConstants.CORRUPT_DIR_NAME);
assertEquals(1, fs.listStatus(corruptDir).length);
}