From a96c96440c0c480d042b4851e810ac4dbbe9e1c1 Mon Sep 17 00:00:00 2001
From: Sakthi
Date: Mon, 17 Dec 2018 12:19:20 -0800
Subject: [PATCH] HBASE-20984: Add/Modify test case to check custom hbase.wal.dir outside hdfs filesystem

---
 .../hadoop/hbase/HBaseTestingUtility.java        | 17 ++++++++++++-----
 .../apache/hadoop/hbase/wal/TestWALFactory.java  |  2 --
 .../apache/hadoop/hbase/wal/TestWALRootDir.java  | 16 +++++++++++++++-
 3 files changed, 27 insertions(+), 8 deletions(-)

diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
index 31a7cad3f5ec5bff779cf752dadaac4dec7c6f8c..db98369c2440c505e0cf862858ef02f58fcd5a78 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
@@ -551,16 +551,16 @@ public class HBaseTestingUtility extends HBaseZKTestingUtility {
   /**
    * Sets up a new path in test filesystem to be used by tests.
    */
-  private Path getNewDataTestDirOnTestFS() throws IOException {
+  private Path getNewDataTestDirOnTestFS(FileSystem fs) throws IOException {
     //The file system can be either local, mini dfs, or if the configuration
     //is supplied externally, it can be an external cluster FS. If it is a local
     //file system, the tests should use getBaseTestDir, otherwise, we can use
     //the working directory, and create a unique sub dir there
-    FileSystem fs = getTestFileSystem();
+
     Path newDataTestDir;
     String randomStr = getRandomUUID().toString();
     if (fs.getUri().getScheme().equals(FileSystem.getLocal(conf).getUri().getScheme())) {
-      newDataTestDir = new Path(getDataTestDir(), randomStr);
+      newDataTestDir = fs.makeQualified(new Path(getDataTestDir(), randomStr));
       File dataTestDir = new File(newDataTestDir.toString());
       if (deleteOnExit()) dataTestDir.deleteOnExit();
     } else {
@@ -571,6 +571,10 @@ public class HBaseTestingUtility extends HBaseZKTestingUtility {
     return newDataTestDir;
   }
 
+  private Path getNewDataTestDirOnTestFS() throws IOException {
+    return getNewDataTestDirOnTestFS(getTestFileSystem());
+  }
+
   /**
    * Cleans the test data directory on the test filesystem.
    * @return True if we removed the test dirs
@@ -1353,8 +1357,11 @@ public class HBaseTestingUtility extends HBaseZKTestingUtility {
    * @throws IOException
    */
   public Path createWALRootDir() throws IOException {
-    FileSystem fs = FileSystem.get(this.conf);
-    Path walDir = getNewDataTestDirOnTestFS();
+    return createWALRootDir(FileSystem.get(this.conf));
+  }
+
+  public Path createWALRootDir(FileSystem fs) throws IOException {
+    Path walDir = getNewDataTestDirOnTestFS(fs);
     FSUtils.setWALRootDir(this.conf, walDir);
     fs.mkdirs(walDir);
     return walDir;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFactory.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFactory.java
index 8fbe09dd30bac8f12165b1aa9964054041bfd2f4..1b51751bc1e15a1e27fc2e716d3ccb6c3770cb16 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFactory.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFactory.java
@@ -59,7 +59,6 @@ import org.apache.hadoop.hbase.regionserver.wal.WALCoprocessorHost;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.testclassification.RegionServerTests;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.CommonFSUtils;
 import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.hbase.util.Threads;
 import org.apache.hadoop.hbase.wal.WALFactory.Providers;
@@ -130,7 +129,6 @@ public class TestWALFactory {
 
   @BeforeClass
   public static void setUpBeforeClass() throws Exception {
-    CommonFSUtils.setWALRootDir(TEST_UTIL.getConfiguration(), new Path("file:///tmp/wal"));
     // Make block sizes small.
     TEST_UTIL.getConfiguration().setInt("dfs.blocksize", 1024 * 1024);
     // needed for testAppendClose()
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALRootDir.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALRootDir.java
index 40fad6ad5208c8ac0b728923b7e42d97ec8779db..0e62daa3438cf25066c16f3f6bfcd21d1b03dc97 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALRootDir.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALRootDir.java
@@ -26,6 +26,7 @@ import java.util.List;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.LocalFileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -88,7 +89,20 @@ public class TestWALRootDir {
   }
 
   @Test
-  public void testWALRootDir() throws Exception {
+  public void testWALRootDirInMultipleFS() throws Exception {
+    // Test WALRootDir in dfs
+    testWALRootDir();
+    cleanup();
+
+    // Test WALRootDir in local fs
+    walRootDir = TEST_UTIL.createWALRootDir(new LocalFileSystem());
+    walFs = FSUtils.getWALFileSystem(conf);
+    testWALRootDir();
+  }
+
+  private void testWALRootDir() throws Exception {
+    LOG.debug("Current HBase Root Dir is {}", rootDir);
+    LOG.debug("Current WAL Root Dir is {}", walRootDir);
     RegionInfo regionInfo = RegionInfoBuilder.newBuilder(tableName).build();
     wals = new WALFactory(conf, "testWALRootDir");
     WAL log = wals.getWAL(regionInfo);
-- 
2.17.2 (Apple Git-113)