diff --git a/src/java/org/apache/hadoop/hbase/io/hfile/HFile.java b/src/java/org/apache/hadoop/hbase/io/hfile/HFile.java index 609f8cc..c62466a 100644 --- a/src/java/org/apache/hadoop/hbase/io/hfile/HFile.java +++ b/src/java/org/apache/hadoop/hbase/io/hfile/HFile.java @@ -713,7 +713,7 @@ public class HFile { // Read in the metadata index. if (trailer.metaIndexCount > 0) { - this.metaIndex = BlockIndex.readIndex(this.comparator, + this.metaIndex = BlockIndex.readIndex(Bytes.BYTES_RAWCOMPARATOR, this.istream, this.trailer.metaIndexOffset, trailer.metaIndexCount); } this.fileInfoLoaded = true; @@ -784,7 +784,10 @@ public class HFile { * @throws IOException */ public ByteBuffer getMetaBlock(String metaBlockName) throws IOException { + if (trailer.metaIndexCount == 0) { + return null; // there are no meta blocks + } if (metaIndex == null) { throw new IOException("Meta index not loaded"); } */ diff --git a/src/java/org/apache/hadoop/hbase/util/Bytes.java b/src/java/org/apache/hadoop/hbase/util/Bytes.java index d2d53b3..af717a7 100644 --- a/src/java/org/apache/hadoop/hbase/util/Bytes.java +++ b/src/java/org/apache/hadoop/hbase/util/Bytes.java @@ -44,6 +44,7 @@ public class Bytes { * Estimate based on study of jhat and jprofiler numbers. */ // JHat says BU is 56 bytes. + // SizeOf which uses java.lang.instrument says 24 bytes. (3 longs?) 
public static final int ESTIMATED_HEAP_TAX = 16; /** diff --git a/src/test/org/apache/hadoop/hbase/io/hfile/TestHFile.java b/src/test/org/apache/hadoop/hbase/io/hfile/TestHFile.java index 1815115..3d86951 100644 --- a/src/test/org/apache/hadoop/hbase/io/hfile/TestHFile.java +++ b/src/test/org/apache/hadoop/hbase/io/hfile/TestHFile.java @@ -169,7 +169,7 @@ public class TestHFile extends TestCase { private void writeNumMetablocks(Writer writer, int n) { for (int i = 0; i < n; i++) { - writer.appendMetaBlock("TfileMeta" + i, ("something to test" + i).getBytes()); + writer.appendMetaBlock("HFileMeta" + i, ("something to test" + i).getBytes()); } } @@ -179,7 +179,7 @@ public class TestHFile extends TestCase { private void readNumMetablocks(Reader reader, int n) throws IOException { for (int i = 0; i < n; i++) { - ByteBuffer b = reader.getMetaBlock("TfileMeta" + i); + ByteBuffer b = reader.getMetaBlock("HFileMeta" + i); byte [] found = Bytes.toBytes(b); assertTrue("failed to match metadata", Arrays.equals( ("something to test" + i).getBytes(), found)); @@ -191,7 +191,7 @@ public class TestHFile extends TestCase { } private void metablocks(final String compress) throws Exception { - Path mFile = new Path(ROOT_DIR, "meta.tfile"); + Path mFile = new Path(ROOT_DIR, "meta.hfile"); FSDataOutputStream fout = createFSOutput(mFile); Writer writer = new Writer(fout, minBlockSize, Compression.getCompressionAlgorithmByName(compress), null, false); @@ -216,6 +216,19 @@ public class TestHFile extends TestCase { metablocks("gz"); } + public void testNullMetaBlocks() throws Exception { + Path mFile = new Path(ROOT_DIR, "nometa.hfile"); + FSDataOutputStream fout = createFSOutput(mFile); + Writer writer = new Writer(fout, minBlockSize, + Compression.Algorithm.NONE, null, false); + writer.append("foo".getBytes(), "value".getBytes()); + writer.close(); + fout.close(); + Reader reader = new Reader(fs, mFile, null); + reader.loadFileInfo(); + assertNull(reader.getMetaBlock("non-existent")); + 
} + /** * Make sure the orginals for our compression libs doesn't change on us. */