From fa86e0a6778eddc39c7c0f3947be7578f8f07f5b Mon Sep 17 00:00:00 2001
From: mbautin
Date: Tue, 15 Nov 2011 17:36:49 -0800
Subject: [PATCH] Fix TestHFileBlock heap size test on a 32-bit JVM

---
 .../apache/hadoop/hbase/io/hfile/HFileBlock.java   |   10 +++-----
 .../regionserver/metrics/SchemaConfigured.java     |   21 +++++++++++++++++--
 .../org/apache/hadoop/hbase/util/ClassSize.java    |   18 +++++++++-------
 .../hadoop/hbase/io/hfile/TestHFileBlock.java      |    9 ++++---
 4 files changed, 37 insertions(+), 21 deletions(-)

diff --git src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
index cfb8681..7fff570 100644
--- src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
+++ src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
@@ -29,7 +29,6 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
 import java.nio.ByteBuffer;
-import java.util.concurrent.atomic.AtomicLong;
 
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
@@ -417,8 +416,8 @@ public class HFileBlock extends SchemaConfigured implements Cacheable {
   @Override
   public long heapSize() {
     long size = ClassSize.align(
-        // This object
-        ClassSize.OBJECT +
+        // Base class size, including object overhead.
+        SCHEMA_CONFIGURED_UNALIGNED_HEAP_SIZE +
         // Block type and byte buffer references
         2 * ClassSize.REFERENCE +
         // On-disk size, uncompressed size, and next block's on-disk size
@@ -428,12 +427,11 @@
     );
 
     if (buf != null) {
+      // Deep overhead of the byte buffer. Needs to be aligned separately.
       size += ClassSize.align(buf.capacity() + BYTE_BUFFER_HEAP_SIZE);
     }
 
-    // SchemaConfigured (but don't count object overhead twice).
-    size += super.heapSize() - ClassSize.OBJECT;
-    return size;
+    return ClassSize.align(size);
   }
 
   /**
diff --git src/main/java/org/apache/hadoop/hbase/regionserver/metrics/SchemaConfigured.java src/main/java/org/apache/hadoop/hbase/regionserver/metrics/SchemaConfigured.java
index 7f665dc..d8ac453 100644
--- src/main/java/org/apache/hadoop/hbase/regionserver/metrics/SchemaConfigured.java
+++ src/main/java/org/apache/hadoop/hbase/regionserver/metrics/SchemaConfigured.java
@@ -57,6 +57,23 @@ public class SchemaConfigured implements HeapSize, SchemaAware {
    */
   private SchemaMetrics schemaMetrics;
 
+  static {
+    if (ClassSize.OBJECT <= 0 || ClassSize.REFERENCE <= 0) {
+      throw new AssertionError("Class sizes are not initialized");
+    }
+  }
+
+  /**
+   * Estimated heap size of this object. We don't count table name and column
+   * family name characters because these strings are shared among many
+   * objects. We need unaligned size to reuse this in subclasses.
+   */
+  public static final int SCHEMA_CONFIGURED_UNALIGNED_HEAP_SIZE =
+      ClassSize.OBJECT + 3 * ClassSize.REFERENCE;
+
+  private static final int SCHEMA_CONFIGURED_ALIGNED_HEAP_SIZE =
+      ClassSize.align(SCHEMA_CONFIGURED_UNALIGNED_HEAP_SIZE);
+
   /** A helper constructor that configures the "use table name" flag. */
   private SchemaConfigured(Configuration conf) {
     if (conf != null) {
@@ -202,9 +219,7 @@ public class SchemaConfigured implements HeapSize, SchemaAware {
 
   @Override
   public long heapSize() {
-    // We don't count table name and column family name characters because
-    // these strings are shared among many objects.
-    return ClassSize.align(ClassSize.OBJECT + 3 * ClassSize.REFERENCE);
+    return SCHEMA_CONFIGURED_ALIGNED_HEAP_SIZE;
   }
 
   public String schemaConfAsJSON() {
diff --git src/main/java/org/apache/hadoop/hbase/util/ClassSize.java src/main/java/org/apache/hadoop/hbase/util/ClassSize.java
index d836fa8..e184730 100755
--- src/main/java/org/apache/hadoop/hbase/util/ClassSize.java
+++ src/main/java/org/apache/hadoop/hbase/util/ClassSize.java
@@ -25,7 +25,6 @@ import org.apache.commons.logging.LogFactory;
 
 import java.lang.reflect.Field;
 import java.lang.reflect.Modifier;
-import java.util.Properties;
 
 /**
  * Class for determining the "size" of a class, an attempt to calculate the
@@ -98,19 +97,13 @@ public class ClassSize {
   /** Overhead for CopyOnWriteArrayList */
   public static final int COPYONWRITE_ARRAYLIST;
 
-  private static final String THIRTY_TWO = "32";
-
  /**
   * Method for reading the arc settings and setting overheads according
   * to 32-bit or 64-bit architecture.
   */
  static {
-    // Figure out whether this is a 32 or 64 bit machine.
-    Properties sysProps = System.getProperties();
-    String arcModel = sysProps.getProperty("sun.arch.data.model");
-    //Default value is set to 8, covering the case when arcModel is unknown
-    if (arcModel.equals(THIRTY_TWO)) {
+    if (is32BitJVM()) {
       REFERENCE = 4;
     } else {
       REFERENCE = 8;
     }
@@ -292,5 +285,14 @@
     //stored and sent together
     return ((num + 7) >> 3) << 3;
   }
+
+  /**
+   * Determines if we are running in a 32-bit JVM. Some unit tests need to
+   * know this too.
+   */
+  public static boolean is32BitJVM() {
+    return System.getProperty("sun.arch.data.model").equals("32");
+  }
+
 }
diff --git src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java
index 073f282..edfd48c 100644
--- src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java
+++ src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java
@@ -491,10 +491,11 @@ public class TestHFileBlock {
 
   @Test
   public void testBlockHeapSize() {
-    // We have seen multiple possible values for this estimate of the heap size
-    // of a ByteBuffer, presumably depending on the JDK version.
-    assertTrue(HFileBlock.BYTE_BUFFER_HEAP_SIZE == 64 ||
-        HFileBlock.BYTE_BUFFER_HEAP_SIZE == 80);
+    if (ClassSize.is32BitJVM()) {
+      assertTrue(HFileBlock.BYTE_BUFFER_HEAP_SIZE == 64);
+    } else {
+      assertTrue(HFileBlock.BYTE_BUFFER_HEAP_SIZE == 80);
+    }
 
     for (int size : new int[] { 100, 256, 12345 }) {
       byte[] byteArr = new byte[HFileBlock.HEADER_SIZE + size];
-- 
1.7.4.4
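Note: the following is a standalone sketch, not HBase code, of the
double-counting this patch removes from HFileBlock.heapSize(). It assumes
32-bit JVM sizes (REFERENCE = 4, OBJECT = 2 * REFERENCE = 8) and
reimplements the same rounding as ClassSize.align(); the class and
variable names are illustrative only.

  public class HeapSizeAlignmentDemo {
    static final int REFERENCE = 4;          // 32-bit reference size
    static final int OBJECT = 2 * REFERENCE; // object header estimate
    static final int SIZEOF_INT = 4;
    static final int SIZEOF_LONG = 8;

    // Round up to a multiple of 8, same arithmetic as ClassSize.align().
    static long align(long num) {
      return ((num + 7) >> 3) << 3;
    }

    public static void main(String[] args) {
      // SchemaConfigured: object header plus three references.
      int superUnaligned = OBJECT + 3 * REFERENCE;  // 20
      long superAligned = align(superUnaligned);    // 24

      // HFileBlock's own fields: 2 references, 3 ints, 2 longs.
      int blockFields = 2 * REFERENCE + 3 * SIZEOF_INT + 2 * SIZEOF_LONG; // 36

      // Old formula: align a partial sum, then add the aligned superclass
      // estimate minus one object header; alignment padding is counted twice.
      long oldEstimate = align(OBJECT + blockFields) + (superAligned - OBJECT);

      // New formula: sum unaligned sizes first, align the total once.
      long newEstimate = align(superUnaligned + blockFields);

      // Prints "old = 64, new = 56": an 8-byte overcount on a 32-bit JVM.
      System.out.println("old = " + oldEstimate + ", new = " + newEstimate);
    }
  }

With 64-bit sizes (REFERENCE = 8, OBJECT = 16) the two formulas happen to
agree (both yield 88 here), which is consistent with the heap size test
failing only on a 32-bit JVM.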