Index: hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/HLogPerformanceEvaluation.java
===================================================================
--- hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/HLogPerformanceEvaluation.java	(revision 1544073)
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/HLogPerformanceEvaluation.java	(working copy)
@@ -20,6 +20,7 @@
 
 import java.io.IOException;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -280,6 +281,9 @@
           LOG.debug("Read count=" + count + " from " + wal);
           break;
         }
+        if (Arrays.equals(e.getKey().getEncodedRegionName(), HLog.DUMMY_REGION)) {
+          continue;
+        }
         count++;
         long seqid = e.getKey().getLogSeqNum();
         if (sequenceIds.containsKey(Bytes.toString(e.getKey().getEncodedRegionName()))) {
Index: hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLog.java
===================================================================
--- hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLog.java	(revision 1544073)
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLog.java	(working copy)
@@ -23,6 +23,7 @@
 import java.io.IOException;
 import java.lang.reflect.Method;
 import java.net.BindException;
+import java.util.Arrays;
 import java.util.Comparator;
 import java.util.HashMap;
 import java.util.List;
@@ -462,6 +463,9 @@
       int count = 0;
       HLog.Entry entry = new HLog.Entry();
       while (reader.next(entry) != null) {
+        if (Arrays.equals(entry.getKey().getEncodedRegionName(), HLog.DUMMY_REGION)) {
+          continue;
+        }
         count++;
         assertTrue("Should be one KeyValue per WALEdit",
           entry.getEdit().getKeyValues().size() == 1);
@@ -513,17 +517,21 @@
       reader = HLogFactory.createReader(fs, filename, conf);
       // Above we added all columns on a single row so we only read one
       // entry in the below... thats why we have '1'.
-      for (int i = 0; i < 1; i++) {
+      for (int count = 0; count < 1;) {
         HLog.Entry entry = reader.next(null);
         if (entry == null) break;
         HLogKey key = entry.getKey();
         WALEdit val = entry.getEdit();
+        if (Arrays.equals(key.getEncodedRegionName(), HLog.DUMMY_REGION)) {
+          continue;
+        }
         assertTrue(Bytes.equals(info.getEncodedNameAsBytes(), key.getEncodedRegionName()));
         assertTrue(tableName.equals(key.getTablename()));
         KeyValue kv = val.getKeyValues().get(0);
         assertTrue(Bytes.equals(row, kv.getRow()));
-        assertEquals((byte)(i + '0'), kv.getValue()[0]);
+        assertEquals((byte)(count + '0'), kv.getValue()[0]);
         System.out.println(key + " " + val);
+        count++;
       }
     } finally {
       if (log != null) {
Index: hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationHLogReaderManager.java
===================================================================
--- hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationHLogReaderManager.java	(revision 1544073)
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationHLogReaderManager.java	(working copy)
@@ -140,11 +140,15 @@
   public void test() throws Exception {
     // Grab the path that was generated when the log rolled as part of its creation
     Path path = pathWatcher.currentPath;
+    boolean doCompress = TEST_UTIL.getConfiguration().getBoolean(
+        HConstants.ENABLE_WAL_COMPRESSION, false);
 
     // open it, it's empty so it fails
     try {
       logManager.openReader(path);
-      fail("Shouldn't be able to open an empty file");
+      if (doCompress) {
+        fail("Shouldn't be able to open an empty file");
+      }
     } catch (EOFException ex) {}
 
     assertEquals(0, logManager.getPosition());
@@ -187,7 +191,9 @@
     // Finally we have a new empty log, which should still give us EOFs
     try {
       logManager.openReader(path);
-      fail();
+      if (doCompress) {
+        fail("Shouldn't be able to open an empty file");
+      }
     } catch (EOFException ex) {}
 
     for (int i = 0; i < nbRows; i++) { appendToLogPlus(walEditKVs); }
Index: hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HLogInputFormat.java
===================================================================
--- hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HLogInputFormat.java	(revision 1544073)
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HLogInputFormat.java	(working copy)
@@ -22,6 +22,7 @@
 import java.io.EOFException;
 import java.io.IOException;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
 
@@ -171,7 +172,9 @@
             return false;
           }
         }
-      while(temp != null && temp.getKey().getWriteTime() < startTime);
+      while (temp != null
+          && (Arrays.equals(temp.getKey().getEncodedRegionName(), HLog.DUMMY_REGION)
+              || temp.getKey().getWriteTime() < startTime));
 
       if (temp == null) {
         if (i > 0) LOG.info("Skipped " + i + " entries.");
Index: hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogSplitter.java
===================================================================
--- hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogSplitter.java	(revision 1544073)
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogSplitter.java	(working copy)
@@ -24,6 +24,7 @@
 import java.io.InterruptedIOException;
 import java.text.ParseException;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashSet;
 import java.util.LinkedList;
@@ -299,6 +300,9 @@
       failedServerName = (serverName == null) ?
"" : serverName.getServerName(); while ((entry = getNextLogLine(in, logPath, skipErrors)) != null) { byte[] region = entry.getKey().getEncodedRegionName(); + if (Arrays.equals(region, HLog.DUMMY_REGION)) { + continue; + } String key = Bytes.toString(region); lastFlushedSequenceId = lastFlushedSequenceIds.get(key); if (lastFlushedSequenceId == null) { @@ -688,6 +692,9 @@ */ void appendEntry(Entry entry) throws InterruptedException, IOException { HLogKey key = entry.getKey(); + if (Arrays.equals(key.getEncodedRegionName(), HLog.DUMMY_REGION)) { + return; + } RegionEntryBuffer buffer; long incrHeap; Index: hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java =================================================================== --- hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java (revision 1544073) +++ hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java (working copy) @@ -524,6 +524,23 @@ FSDataOutputStream nextHdfsOut = null; if (nextWriter instanceof ProtobufLogWriter) { nextHdfsOut = ((ProtobufLogWriter)nextWriter).getStream(); + boolean doCompress = conf.getBoolean(HConstants.ENABLE_WAL_COMPRESSION, false); + if (!doCompress) { + // append a dummy entry and sync. So we perform the costly + // allocateBlock and sync before we get the lock to roll writers. + WALEdit edit = new WALEdit(); + List clusterIds = new ArrayList(1); + clusterIds.add(HConstants.DEFAULT_CLUSTER_ID); + HLogKey key = makeKey(DUMMY_REGION /* regionName */, TableName.valueOf(DUMMY_TABLE), + 0L, EnvironmentEdgeManager.currentTimeMillis(), clusterIds, 0L, 0L); + try { + nextWriter.append(new HLog.Entry(key, edit)); + nextWriter.sync(); + } catch (IOException e) { + // optimization failed, no need to abort here. + LOG.warn("write DUMMY entry failed", e); + } + } } Path oldFile = null; Index: hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLog.java =================================================================== --- hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLog.java (revision 1544073) +++ hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLog.java (working copy) @@ -38,6 +38,7 @@ import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer; +import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.io.Writable; import com.google.common.annotations.VisibleForTesting; @@ -66,6 +67,8 @@ // TODO: Implemenation detail. Why in here? Pattern EDITFILES_NAME_PATTERN = Pattern.compile("-?[0-9]+"); String RECOVERED_LOG_TMPFILE_SUFFIX = ".temp"; + byte[] DUMMY_REGION = Bytes.toBytes(""); + String DUMMY_TABLE = "a"; interface Reader {