diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java
index dee5fb0..7db990f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java
@@ -115,7 +115,7 @@ public class WALSplitter {
   public static final boolean SPLIT_SKIP_ERRORS_DEFAULT = false;

   // Parameters for split process
-  protected final Path rootDir;
+  protected final Path walDir;
   protected final FileSystem fs;
   protected final Configuration conf;
@@ -148,14 +148,14 @@ public class WALSplitter {
   @VisibleForTesting
-  WALSplitter(final WALFactory factory, Configuration conf, Path rootDir,
+  WALSplitter(final WALFactory factory, Configuration conf, Path walDir,
       FileSystem fs, LastSequenceId idChecker,
       SplitLogWorkerCoordination splitLogWorkerCoordination) {
     this.conf = HBaseConfiguration.create(conf);
     String codecClassName = conf
         .get(WALCellCodec.WAL_CELL_CODEC_CLASS_KEY, WALCellCodec.class.getName());
     this.conf.set(HConstants.RPC_CODEC_CONF_KEY, codecClassName);
-    this.rootDir = rootDir;
+    this.walDir = walDir;
     this.fs = fs;
     this.sequenceIdChecker = idChecker;
     this.splitLogWorkerCoordination = splitLogWorkerCoordination;
@@ -186,11 +186,11 @@ public class WALSplitter {
   *
* @return false if it is interrupted by the progress-able.
*/
- public static boolean splitLogFile(Path rootDir, FileStatus logfile, FileSystem fs,
+ public static boolean splitLogFile(Path walDir, FileStatus logfile, FileSystem fs,
Configuration conf, CancelableProgressable reporter, LastSequenceId idChecker,
SplitLogWorkerCoordination splitLogWorkerCoordination, final WALFactory factory)
throws IOException {
- WALSplitter s = new WALSplitter(factory, conf, rootDir, fs, idChecker,
+ WALSplitter s = new WALSplitter(factory, conf, walDir, fs, idChecker,
splitLogWorkerCoordination);
return s.splitLogFile(logfile, reporter);
}
@@ -322,10 +322,10 @@ public class WALSplitter {
LOG.warn("Could not parse, corrupted WAL={}", logPath, e);
if (splitLogWorkerCoordination != null) {
// Some tests pass in a csm of null.
- splitLogWorkerCoordination.markCorrupted(rootDir, logfile.getPath().getName(), fs);
+ splitLogWorkerCoordination.markCorrupted(walDir, logfile.getPath().getName(), fs);
} else {
// for tests only
- ZKSplitLog.markCorrupted(rootDir, logfile.getPath().getName(), fs);
+ ZKSplitLog.markCorrupted(walDir, logfile.getPath().getName(), fs);
}
isCorrupted = true;
} catch (IOException e) {
@@ -457,18 +457,19 @@ public class WALSplitter {
* logEntry: e.g. /hbase/some_table/2323432434/recovered.edits/2332.
* This method also ensures existence of RECOVERED_EDITS_DIR under the region
* creating it if necessary.
- * @param fs
* @param logEntry
- * @param rootDir HBase root dir.
* @param fileNameBeingSplit the file being split currently. Used to generate tmp file name.
+ * @param conf the configuration, used to locate the HBase root dir and its filesystem
* @return Path to file into which to dump split log edits.
* @throws IOException
*/
@SuppressWarnings("deprecation")
@VisibleForTesting
- static Path getRegionSplitEditsPath(final FileSystem fs,
- final Entry logEntry, final Path rootDir, String fileNameBeingSplit)
+ static Path getRegionSplitEditsPath(final Entry logEntry, String fileNameBeingSplit,
+ Configuration conf)
throws IOException {
Path rootDir = FSUtils.getRootDir(conf);
FileSystem fs = rootDir.getFileSystem(conf);
Path tableDir = FSUtils.getTableDir(rootDir, logEntry.getKey().getTableName());
String encodedRegionName = Bytes.toString(logEntry.getKey().getEncodedRegionName());
Path regiondir = HRegion.getRegionDir(tableDir, encodedRegionName);
@@ -1470,7 +1471,7 @@ public class WALSplitter {
if (blacklistedRegions.contains(region)) {
return null;
}
- ret = createWAP(region, entry, rootDir);
+ ret = createWAP(region, entry);
if (ret == null) {
blacklistedRegions.add(region);
return null;
@@ -1484,16 +1485,18 @@ public class WALSplitter {
/**
* @return a path with a write for that path. caller should close.
*/
- WriterAndPath createWAP(byte[] region, Entry entry, Path rootdir) throws IOException {
- Path regionedits = getRegionSplitEditsPath(fs, entry, rootdir, fileBeingSplit.getPath().getName());
+ WriterAndPath createWAP(byte[] region, Entry entry) throws IOException {
+ Path regionedits = getRegionSplitEditsPath(entry,
+ fileBeingSplit.getPath().getName(), conf);
if (regionedits == null) {
return null;
}
- if (fs.exists(regionedits)) {
+ FileSystem rootFs = FSUtils.getRootDir(conf).getFileSystem(conf);
+ if (rootFs.exists(regionedits)) {
LOG.warn("Found old edits file. It could be the "
+ "result of a previous failed split attempt. Deleting " + regionedits + ", length="
- + fs.getFileStatus(regionedits).getLen());
- if (!fs.delete(regionedits, false)) {
+ + rootFs.getFileStatus(regionedits).getLen());
+ if (!rootFs.delete(regionedits, false)) {
LOG.warn("Failed delete of old {}", regionedits);
}
}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFactory.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFactory.java
index b1fe67b..fd2b3c4 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFactory.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFactory.java
@@ -57,6 +57,7 @@ import org.apache.hadoop.hbase.regionserver.wal.WALCoprocessorHost;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.CommonFSUtils;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.Threads;
import org.apache.hadoop.hdfs.DistributedFileSystem;
@@ -126,6 +127,7 @@ public class TestWALFactory {
@BeforeClass
public static void setUpBeforeClass() throws Exception {
+ CommonFSUtils.setWALRootDir(TEST_UTIL.getConfiguration(), new Path("file:///tmp/wal"));
// Make block sizes small.
TEST_UTIL.getConfiguration().setInt("dfs.blocksize", 1024 * 1024);
// needed for testAppendClose()
@@ -176,7 +178,7 @@ public class TestWALFactory {
final MultiVersionConcurrencyControl mvcc = new MultiVersionConcurrencyControl(1);
final int howmany = 3;
RegionInfo[] infos = new RegionInfo[3];
- Path tabledir = FSUtils.getTableDir(hbaseWALDir, tableName);
+ Path tabledir = FSUtils.getTableDir(hbaseDir, tableName);
fs.mkdirs(tabledir);
for (int i = 0; i < howmany; i++) {
infos[i] = RegionInfoBuilder.newBuilder(tableName).setStartKey(Bytes.toBytes("" + i))
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplit.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplit.java
index 030c99f..0d5aa0d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplit.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplit.java
@@ -390,8 +390,8 @@ public class TestWALSplit {
new Entry(new WALKeyImpl(encoded,
TableName.META_TABLE_NAME, 1, now, HConstants.DEFAULT_CLUSTER_ID),
new WALEdit());
- Path p = WALSplitter.getRegionSplitEditsPath(fs, entry, HBASEDIR,
- FILENAME_BEING_SPLIT);
+ Path p = WALSplitter.getRegionSplitEditsPath(entry,
+ FILENAME_BEING_SPLIT, conf);
String parentOfParent = p.getParent().getParent().getName();
assertEquals(parentOfParent, RegionInfoBuilder.FIRST_META_REGIONINFO.getEncodedName());
}
@@ -416,8 +416,8 @@ public class TestWALSplit {
assertEquals(HConstants.RECOVERED_EDITS_DIR, parent.getName());
fs.createNewFile(parent); // create a recovered.edits file
- Path p = WALSplitter.getRegionSplitEditsPath(fs, entry, HBASEDIR,
- FILENAME_BEING_SPLIT);
+ Path p = WALSplitter.getRegionSplitEditsPath(entry,
+ FILENAME_BEING_SPLIT, conf);
String parentOfParent = p.getParent().getParent().getName();
assertEquals(parentOfParent, RegionInfoBuilder.FIRST_META_REGIONINFO.getEncodedName());
WALFactory.createRecoveredEditsWriter(fs, p, conf).close();