Index: src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLogSplit.java
===================================================================
--- src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLogSplit.java	(revision 1055187)
+++ src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLogSplit.java	(working copy)
@@ -49,6 +49,7 @@
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.Threads;
+import org.apache.hadoop.hdfs.server.namenode.LeaseExpiredException;
 import org.apache.hadoop.ipc.RemoteException;
 import org.junit.After;
 import org.junit.AfterClass;
@@ -672,6 +673,33 @@
       assertTrue(ioe.toString().contains("Injected"));
     }
   }
+
+  // Test for HBASE-3412
+  @Test
+  public void testMovedHLogDuringRecovery() throws Exception {
+    generateHLogs(-1);
+
+    fs.initialize(fs.getUri(), conf);
+
+    // This partial mock will throw LEE for every file simulating
+    // files that were moved
+    FileSystem spiedFs = Mockito.spy(fs);
+    // The "File does not exist" part is very important,
+    // that's how it comes out of HDFS
+    Mockito.doThrow(new LeaseExpiredException("Injected: File does not exist")).
+        when(spiedFs).append(Mockito.any());
+
+    HLogSplitter logSplitter = new HLogSplitter(
+        conf, hbaseDir, hlogDir, oldLogDir, spiedFs);
+
+    try {
+      logSplitter.splitLog();
+      assertEquals(NUM_WRITERS, fs.listStatus(oldLogDir).length);
+      assertFalse(fs.exists(hlogDir));
+    } catch (IOException e) {
+      fail("There shouldn't be any exception but: " + e.toString());
+    }
+  }
 
   /**
    * Test log split process with fake data and lots of edits to trigger threading
Index: src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogSplitter.java
===================================================================
--- src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogSplitter.java	(revision 1055187)
+++ src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogSplitter.java	(working copy)
@@ -22,6 +22,7 @@
 import static org.apache.hadoop.hbase.util.FSUtils.recoverFileLease;
 
 import java.io.EOFException;
+import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.lang.reflect.Constructor;
 import java.lang.reflect.InvocationTargetException;
@@ -262,8 +263,15 @@
         processedLogs.add(logPath);
       } catch (EOFException eof) {
         // truncated files are expected if a RS crashes (see HBASE-2643)
-        LOG.info("EOF from hlog " + logPath + ". continuing");
+        LOG.info("EOF from hlog " + logPath + ". Continuing");
         processedLogs.add(logPath);
+      } catch (FileNotFoundException fnfe) {
+        // A file may be missing if the region server was able to archive it
+        // before shutting down. This means the edits were persisted already
+        LOG.info("A log was missing " + logPath +
+            ", probably because it was moved by the" +
+            " now dead region server. Continuing");
+        processedLogs.add(logPath);
       } catch (IOException e) {
         // If the IOE resulted from bad file format,
         // then this problem is idempotent and retrying won't help
Index: src/main/java/org/apache/hadoop/hbase/util/FSUtils.java
===================================================================
--- src/main/java/org/apache/hadoop/hbase/util/FSUtils.java	(revision 1055187)
+++ src/main/java/org/apache/hadoop/hbase/util/FSUtils.java	(working copy)
@@ -36,10 +36,12 @@
 import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;
 import org.apache.hadoop.hdfs.protocol.FSConstants;
+import org.apache.hadoop.hdfs.server.namenode.LeaseExpiredException;
 import org.apache.hadoop.io.SequenceFile;
 
 import java.io.DataInputStream;
 import java.io.EOFException;
+import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.net.URI;
 import java.net.URISyntaxException;
@@ -649,6 +651,11 @@
         } catch (InterruptedException ex) {
           // ignore it and try again
         }
+      } else if (e instanceof LeaseExpiredException &&
+          e.getMessage().contains("File does not exist")) {
+        // This exception comes out instead of FNFE, fix it
+        throw new FileNotFoundException(
+            "The given HLog wasn't found at " + p.toString());
       } else {
         throw new IOException("Failed to open " + p + " for append", e);
       }