Index: src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java
===================================================================
--- src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java	(revision 1329750)
+++ src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java	(working copy)
@@ -550,6 +550,7 @@
       }
 
       midLeafBlockOffset = in.readLong();
+      LOG.debug("The midLeafBlockOffset is " + midLeafBlockOffset);
       midLeafBlockOnDiskSize = in.readInt();
       midKeyEntry = in.readInt();
     }
Index: src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV2.java
===================================================================
--- src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV2.java	(revision 1329750)
+++ src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV2.java	(working copy)
@@ -74,7 +74,7 @@
     Path hfilePath = new Path(TEST_UTIL.getDataTestDir(),
         "testHFileFormatV2");
 
-    final Compression.Algorithm COMPRESS_ALGO = Compression.Algorithm.GZ;
+    final Compression.Algorithm COMPRESS_ALGO = Compression.Algorithm.NONE;
     HFileWriterV2 writer = (HFileWriterV2)
         new HFileWriterV2.WriterFactoryV2(conf, new CacheConfig(conf))
             .withPath(fs, hfilePath)
@@ -88,7 +88,7 @@
     Random rand = new Random(9713312); // Just a fixed seed.
 
-    final int ENTRY_COUNT = 10000;
+    final int ENTRY_COUNT = 50000;
     List<byte[]> keys = new ArrayList<byte[]>();
     List<byte[]> values = new ArrayList<byte[]>();
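
Note on the added LOG.debug line: the message string is concatenated even when DEBUG logging is disabled. A minimal sketch of the guarded idiom, assuming the commons-logging Log field that HFileBlockIndex already declares as LOG (the class MidKeyLoggingSketch below is a hypothetical stand-in, for illustration only):

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

// Hypothetical stand-in for HFileBlockIndex, which already declares an
// equivalent LOG field; only the isDebugEnabled() guard would be new.
class MidKeyLoggingSketch {
  private static final Log LOG = LogFactory.getLog(MidKeyLoggingSketch.class);

  void logMidLeafBlockOffset(long midLeafBlockOffset) {
    // Skip the string concatenation entirely unless DEBUG is enabled
    // for this logger.
    if (LOG.isDebugEnabled()) {
      LOG.debug("The midLeafBlockOffset is " + midLeafBlockOffset);
    }
  }
}

Both forms log the same message; the guard only matters on hot paths, so if this code runs just once when the index root is loaded, the unguarded call in the patch is likely harmless as-is.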