Index: src/java/org/apache/hadoop/hbase/KeyValue.java
===================================================================
--- src/java/org/apache/hadoop/hbase/KeyValue.java	(revision 788096)
+++ src/java/org/apache/hadoop/hbase/KeyValue.java	(working copy)
@@ -1784,8 +1784,9 @@
 
   // HeapSize
   public long heapSize() {
-    return ClassSize.alignSize(HeapSize.OBJECT + HeapSize.REFERENCE +
-      HeapSize.BYTE_ARRAY + length + (2 * Bytes.SIZEOF_INT));
+    return ClassSize.align(ClassSize.OBJECT + ClassSize.REFERENCE +
+      ClassSize.align(ClassSize.ARRAY + length) +
+      (2 * Bytes.SIZEOF_INT));
   }
 
   // Writable
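For a rough sanity check of the new formula: on a 64-bit JVM, ClassSize.reload() yields REFERENCE = 8, OBJECT = 16 and ARRAY = 24. The following standalone sketch mirrors the same arithmetic; the local align() helper and the example length of 40 are illustrative stand-ins, not part of the patch:

// Standalone sketch of the new KeyValue.heapSize() arithmetic, assuming a
// 64-bit JVM where REFERENCE = 8, OBJECT = 16 and ARRAY = 24.
public class KeyValueHeapSizeSketch {
  static long align(long num) { return ((num + 7) >> 3) << 3; }

  public static void main(String[] args) {
    int length = 40;              // hypothetical backing-array length
    long heap = align(16 + 8     // object header plus the bytes reference
        + align(24 + length)     // aligned byte[]: array header + payload
        + 2 * 4);                // the two int fields (offset and length)
    System.out.println(heap);    // prints 96 for this example
  }
}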
Index: src/java/org/apache/hadoop/hbase/client/Put.java
===================================================================
--- src/java/org/apache/hadoop/hbase/client/Put.java	(revision 788096)
+++ src/java/org/apache/hadoop/hbase/client/Put.java	(working copy)
@@ -52,9 +52,10 @@
   private Map<byte [], List<KeyValue>> familyMap =
     new TreeMap<byte [], List<KeyValue>>(Bytes.BYTES_COMPARATOR);
 
-  private static final long OVERHEAD = ClassSize.alignSize(HeapSize.OBJECT +
-    1 * HeapSize.REFERENCE + 1 * HeapSize.ARRAY + 2 * Bytes.SIZEOF_LONG +
-    1 * Bytes.SIZEOF_BOOLEAN + 1 * HeapSize.REFERENCE + HeapSize.TREEMAP_SIZE);
+  private static final long OVERHEAD = ClassSize.align(
+    ClassSize.OBJECT + ClassSize.REFERENCE +
+    2 * Bytes.SIZEOF_LONG + Bytes.SIZEOF_BOOLEAN +
+    ClassSize.REFERENCE + ClassSize.TREEMAP);
 
   /** Constructor for Writable. DO NOT USE */
   public Put() {}
@@ -201,10 +202,16 @@
     this.timestamp = timestamp;
   }
 
+  /**
+   * @return the number of different families included in this put
+   */
   public int numFamilies() {
     return familyMap.size();
   }
-  
+
+  /**
+   * @return the total number of KeyValues that will be added with this put
+   */
   public int size() {
     int size = 0;
     for(List<KeyValue> kvList : this.familyMap.values()) {
@@ -270,28 +277,30 @@
   //HeapSize
   public long heapSize() {
     long heapsize = OVERHEAD;
-    heapsize += ClassSize.alignSize(this.row.length);
-
+    //Adding row
+    heapsize += ClassSize.align(ClassSize.ARRAY + this.row.length);
+    //Adding map entry overhead
+    heapsize +=
+      ClassSize.align(this.familyMap.size() * ClassSize.MAP_ENTRY);
     for(Map.Entry<byte [], List<KeyValue>> entry : this.familyMap.entrySet()) {
-      //Adding entry overhead
-      heapsize += HeapSize.MAP_ENTRY_SIZE;
-      //Adding key overhead
-      heapsize += HeapSize.REFERENCE + HeapSize.ARRAY +
-        ClassSize.alignSize(entry.getKey().length);
+      //Adding key overhead
+      heapsize +=
+        ClassSize.align(ClassSize.ARRAY + entry.getKey().length);
 
       //This part is kinds tricky since the JVM can reuse references if you
       //store the same value, but have a good match with SizeOf at the moment
      //Adding value overhead
-      heapsize += HeapSize.REFERENCE + HeapSize.ARRAYLIST_SIZE;
+      heapsize += ClassSize.align(ClassSize.ARRAYLIST);
      int size = entry.getValue().size();
-      heapsize += size * HeapSize.REFERENCE;
+      heapsize += ClassSize.align(ClassSize.ARRAY +
+        size * ClassSize.REFERENCE);
+
       for(KeyValue kv : entry.getValue()) {
        heapsize += kv.heapSize();
       }
     }
-    return heapsize;
+    return ClassSize.align(heapsize);
   }
 
   //Writable
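A minimal usage sketch of the reworked accounting, assuming the 0.20-era client API (the row, family, qualifier and value literals are hypothetical):

import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;

public class PutHeapSizeDemo {
  public static void main(String[] args) {
    Put put = new Put(Bytes.toBytes("row1"));
    // Each cell contributes its own KeyValue.heapSize() on top of the
    // per-family map entry, key array and ArrayList overheads counted above.
    put.add(Bytes.toBytes("cf"), Bytes.toBytes("q"), Bytes.toBytes("value"));
    System.out.println("families: " + put.numFamilies()); // 1
    System.out.println("keyvalues: " + put.size());       // 1
    System.out.println("heap: " + put.heapSize());        // aligned byte count
  }
}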
Index: src/java/org/apache/hadoop/hbase/io/HeapSize.java
===================================================================
--- src/java/org/apache/hadoop/hbase/io/HeapSize.java	(revision 788096)
+++ src/java/org/apache/hadoop/hbase/io/HeapSize.java	(working copy)
@@ -38,38 +38,6 @@
  *
  */
 public interface HeapSize {
-
-  /** Reference size is 8 bytes on 64-bit, 4 bytes on 32-bit */
-  static final int REFERENCE = 8;
-
-  /** Object overhead is minimum 2 * reference size (8 bytes on 64-bit) */
-  static final int OBJECT = 2 * REFERENCE;
-
-  /** Array overhead */
-  static final int ARRAY = 3 * REFERENCE;
-
-  /** OverHead for nested arrays */
-  static final int MULTI_ARRAY = (4 * REFERENCE) + ARRAY;
-
-  /** Byte arrays are fixed size below plus its length, 8 byte aligned */
-  static final int BYTE_ARRAY = 3 * REFERENCE;
-
-  /** Overhead for ByteBuffer */
-  static final int BYTE_BUFFER = 56;
-
-  /** String overhead */
-  static final int STRING_SIZE = 64;
-
-  /** Overhead for ArrayList(0) */
-  static final int ARRAYLIST_SIZE = 64;
-
-  /** Overhead for TreeMap */
-  static final int TREEMAP_SIZE = 80;
-
-  /** Overhead for entry in map */
-  static final int MAP_ENTRY_SIZE = 64;
-
   /**
    * @return Approximate 'exclusive deep size' of implementing object. Includes
    * count of payload and hosting object sizings.
Index: src/java/org/apache/hadoop/hbase/io/hfile/HFile.java
===================================================================
--- src/java/org/apache/hadoop/hbase/io/hfile/HFile.java	(revision 788096)
+++ src/java/org/apache/hadoop/hbase/io/hfile/HFile.java	(working copy)
@@ -1339,10 +1339,6 @@
      */
     final RawComparator<byte []> comparator;
 
-    static final int OVERHEAD = (int)ClassSize.alignSize(HeapSize.OBJECT +
-      2 * Bytes.SIZEOF_INT + 1 * HeapSize.MULTI_ARRAY + 2 * HeapSize.ARRAY +
-      4 * HeapSize.REFERENCE);
-
     /*
      * Shutdown default constructor
      */
@@ -1498,23 +1494,28 @@
     }
 
     public long heapSize() {
-      long size = OVERHEAD;
-
+      long heapsize = ClassSize.align(ClassSize.OBJECT +
+        2 * Bytes.SIZEOF_INT + (3 + 1) * ClassSize.REFERENCE);
       //Calculating the size of blockKeys
       if(blockKeys != null) {
+        //Adding array + references overhead
+        heapsize += ClassSize.align(ClassSize.ARRAY +
+          blockKeys.length * ClassSize.REFERENCE);
+        //Adding bytes
         for(byte [] bs : blockKeys) {
-          size += HeapSize.MULTI_ARRAY;
-          size += ClassSize.alignSize(bs.length);
+          heapsize += ClassSize.align(ClassSize.ARRAY + bs.length);
         }
       }
       if(blockOffsets != null) {
-        size += blockOffsets.length * Bytes.SIZEOF_LONG;
+        heapsize += ClassSize.align(ClassSize.ARRAY +
+          blockOffsets.length * Bytes.SIZEOF_LONG);
       }
       if(blockDataSizes != null) {
-        size += blockDataSizes.length * Bytes.SIZEOF_INT;
+        heapsize += ClassSize.align(ClassSize.ARRAY +
+          blockDataSizes.length * Bytes.SIZEOF_INT);
       }
-      return size;
+      return ClassSize.align(heapsize);
     }
   }
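Every array term above follows the same pattern: an aligned array header plus the payload. A small sketch of that accounting for the three index arrays, with made-up lengths and 64-bit sizes assumed (REFERENCE = 8, ARRAY = 24):

// Sketch of the per-array accounting in BlockIndex.heapSize(); the block
// count and the local align() helper are illustrative assumptions.
public class BlockIndexArraySketch {
  static long align(long num) { return ((num + 7) >> 3) << 3; }

  public static void main(String[] args) {
    int blocks = 3;
    long keyRefs = align(24 + blocks * 8L); // byte[][] blockKeys reference array
    long offsets = align(24 + blocks * 8L); // long[] blockOffsets
    long sizes   = align(24 + blocks * 4L); // int[] blockDataSizes
    System.out.println(keyRefs + " " + offsets + " " + sizes); // 48 48 40
  }
}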
Index: src/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java
===================================================================
--- src/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java	(revision 788096)
+++ src/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java	(working copy)
@@ -90,9 +90,10 @@
   private long missCount = 0;
 
   /** Memory overhead of this Object (for HeapSize) */
-  private static final int OVERHEAD = (int)ClassSize.alignSize(HeapSize.OBJECT +
-    1 * Bytes.SIZEOF_FLOAT + 2 * Bytes.SIZEOF_INT + 1 * HeapSize.ARRAY +
-    3 * HeapSize.REFERENCE + 4 * Bytes.SIZEOF_LONG);
+  private static final int OVERHEAD = ClassSize.align(
+    ClassSize.OBJECT + 1 * Bytes.SIZEOF_FLOAT + 2 * Bytes.SIZEOF_INT +
+    ClassSize.align(ClassSize.ARRAY) + 3 * ClassSize.REFERENCE +
+    4 * Bytes.SIZEOF_LONG);
 
   /**
    * Constructs a new, empty map with the specified initial capacity,
@@ -119,7 +120,7 @@
     if (loadFactor <= 0 || Float.isNaN(loadFactor)) {
       throw new IllegalArgumentException("Load factor must be > 0");
     }
-    if (maxMemUsage <= (OVERHEAD + initialCapacity * HeapSize.REFERENCE)) {
+    if (maxMemUsage <= (OVERHEAD + initialCapacity * ClassSize.REFERENCE)) {
       throw new IllegalArgumentException("Max memory usage too small to " +
         "support base overhead");
     }
@@ -300,7 +301,7 @@
    * @return memory usage of map in bytes
    */
   public long heapSize() {
-    return (memTotal - memFree);
+    return ClassSize.align(memTotal - memFree);
   }
 
   //--------------------------------------------------------------------------
@@ -503,7 +504,7 @@
    * @return baseline memory overhead of object in bytes
    */
   private long getMinimumUsage() {
-    return OVERHEAD + (entries.length * HeapSize.REFERENCE);
+    return OVERHEAD + (entries.length * ClassSize.REFERENCE);
   }
 
   //--------------------------------------------------------------------------
@@ -724,7 +725,7 @@
     }
 
     // Determine how much additional space will be required to grow the array
-    long requiredSpace = (newCapacity - oldCapacity) * HeapSize.REFERENCE;
+    long requiredSpace = (newCapacity - oldCapacity) * ClassSize.REFERENCE;
 
     // Verify/enforce we have sufficient memory to grow
     checkAndFreeMemory(requiredSpace);
@@ -833,7 +834,6 @@
    */
   private void init() {
     memFree -= OVERHEAD;
-    memFree -= (entries.length * HeapSize.REFERENCE);
   }
 
   //--------------------------------------------------------------------------
@@ -975,8 +975,9 @@
     protected long heapSize;
 
     /** The baseline overhead memory usage of this class */
-    static final int OVERHEAD = HeapSize.OBJECT + 5 * HeapSize.REFERENCE +
-      1 * Bytes.SIZEOF_INT + 1 * Bytes.SIZEOF_LONG;
+    static final int OVERHEAD = ClassSize.OBJECT +
+      5 * ClassSize.REFERENCE + 1 * Bytes.SIZEOF_INT +
+      1 * Bytes.SIZEOF_LONG;
 
     /**
      * Create a new entry.
@@ -1139,8 +1140,8 @@
    * @return size of String in bytes
    */
   private long heapSize(String s) {
-    return HeapSize.STRING_SIZE +
-      ClassSize.alignSize(s.length() * Bytes.SIZEOF_CHAR);
+    return ClassSize.STRING + ClassSize.align(ClassSize.ARRAY +
+      s.length() * Bytes.SIZEOF_CHAR);
   }
 
   /**
@@ -1148,7 +1149,8 @@
    * @return size of ByteBuffer in bytes
    */
   private long heapSize(ByteBuffer b) {
-    return HeapSize.BYTE_BUFFER + ClassSize.alignSize(b.capacity());
+    return ClassSize.BYTE_BUFFER +
+      ClassSize.align(ClassSize.ARRAY + b.capacity());
   }
 }
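The String and ByteBuffer helpers both combine a fixed object cost with an aligned backing array. A worked sketch of the String case under 64-bit assumptions, where STRING resolves to align(16 + 8 + 3 * 4) = 40 and ARRAY to 24 (the cache-key literal is hypothetical):

// Sketch of the heapSize(String) arithmetic on a 64-bit JVM.
public class StringHeapSizeSketch {
  static long align(long num) { return ((num + 7) >> 3) << 3; }

  public static void main(String[] args) {
    String s = "block-123";                       // hypothetical cache key
    long heap = 40 + align(24 + s.length() * 2L); // STRING + aligned char[]
    System.out.println(heap);                     // 40 + align(42) = 88
  }
}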
Index: src/java/org/apache/hadoop/hbase/regionserver/HLogKey.java
===================================================================
--- src/java/org/apache/hadoop/hbase/regionserver/HLogKey.java	(revision 788096)
+++ src/java/org/apache/hadoop/hbase/regionserver/HLogKey.java	(working copy)
@@ -22,6 +22,7 @@
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.io.HeapSize;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.ClassSize;
 import org.apache.hadoop.io.*;
 
 import java.io.*;
@@ -42,7 +43,7 @@
   private long logSeqNum;
   // Time at which this edit was written.
   private long writeTime;
-  private int HEAP_TAX = HeapSize.OBJECT + (2 * HeapSize.BYTE_ARRAY) +
+  private int HEAP_TAX = ClassSize.OBJECT + (2 * ClassSize.ARRAY) +
     (2 * Bytes.SIZEOF_LONG);
 
   /** Writable Consructor -- Do not use. */
Index: src/java/org/apache/hadoop/hbase/regionserver/LruHashMap.java
===================================================================
--- src/java/org/apache/hadoop/hbase/regionserver/LruHashMap.java	(revision 788096)
+++ src/java/org/apache/hadoop/hbase/regionserver/LruHashMap.java	(working copy)
@@ -28,13 +28,11 @@
 
 import org.apache.hadoop.hbase.io.HeapSize;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.ClassSize;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
-
-
 /**
 * The LruHashMap is a memory-aware HashMap with a configurable maximum
 * memory footprint.
@@ -67,8 +65,8 @@
   /** Memory overhead of this Object (for HeapSize) */
   private static final int OVERHEAD = 5 * Bytes.SIZEOF_LONG +
-    2 * Bytes.SIZEOF_INT + 2 * Bytes.SIZEOF_FLOAT + 3 * HeapSize.REFERENCE +
-    1 * HeapSize.ARRAY;
+    2 * Bytes.SIZEOF_INT + 2 * Bytes.SIZEOF_FLOAT + 3 * ClassSize.REFERENCE +
+    1 * ClassSize.ARRAY;
 
   /** Load factor allowed (usually 75%) */
   private final float loadFactor;
@@ -119,7 +117,7 @@
     if (loadFactor <= 0 || Float.isNaN(loadFactor)) {
       throw new IllegalArgumentException("Load factor must be > 0");
     }
-    if (maxMemUsage <= (OVERHEAD + initialCapacity * HeapSize.REFERENCE)) {
+    if (maxMemUsage <= (OVERHEAD + initialCapacity * ClassSize.REFERENCE)) {
       throw new IllegalArgumentException("Max memory usage too small to " +
         "support base overhead");
     }
@@ -472,7 +470,7 @@
    * @return baseline memory overhead of object in bytes
    */
   private long getMinimumUsage() {
-    return OVERHEAD + (entries.length * HeapSize.REFERENCE);
+    return OVERHEAD + (entries.length * ClassSize.REFERENCE);
   }
 
   //--------------------------------------------------------------------------
@@ -693,7 +691,7 @@
     }
 
     // Determine how much additional space will be required to grow the array
-    long requiredSpace = (newCapacity - oldCapacity) * HeapSize.REFERENCE;
+    long requiredSpace = (newCapacity - oldCapacity) * ClassSize.REFERENCE;
 
     // Verify/enforce we have sufficient memory to grow
     checkAndFreeMemory(requiredSpace);
@@ -802,7 +800,7 @@
    */
   private void init() {
     memFree -= OVERHEAD;
-    memFree -= (entries.length * HeapSize.REFERENCE);
+    memFree -= (entries.length * ClassSize.REFERENCE);
   }
 
   //--------------------------------------------------------------------------
@@ -927,8 +925,8 @@
   protected static class Entry<K extends HeapSize, V extends HeapSize>
   implements Map.Entry<K,V>, HeapSize {
     /** The baseline overhead memory usage of this class */
-    static final int OVERHEAD = 1 * Bytes.SIZEOF_LONG + 5 * HeapSize.REFERENCE +
-      2 * Bytes.SIZEOF_INT;
+    static final int OVERHEAD = 1 * Bytes.SIZEOF_LONG +
+      5 * ClassSize.REFERENCE + 2 * Bytes.SIZEOF_INT;
 
     /** The key */
     protected final K key;
Index: src/java/org/apache/hadoop/hbase/regionserver/ScanQueryMatcher.java
===================================================================
--- src/java/org/apache/hadoop/hbase/regionserver/ScanQueryMatcher.java	(revision 786666)
+++ src/java/org/apache/hadoop/hbase/regionserver/ScanQueryMatcher.java	(working copy)
@@ -33,7 +33,6 @@
 * A query matcher that is specifically designed for the scan case.
 */
 public class ScanQueryMatcher extends QueryMatcher {
-  private Filter filter;
   // have to support old style filter for now.
   private RowFilterInterface oldFilter;
Index: src/java/org/apache/hadoop/hbase/util/ClassSize.java
===================================================================
--- src/java/org/apache/hadoop/hbase/util/ClassSize.java	(revision 788096)
+++ src/java/org/apache/hadoop/hbase/util/ClassSize.java	(working copy)
@@ -20,12 +20,13 @@
 package org.apache.hadoop.hbase.util;
 
+import java.io.IOException;
 import java.lang.reflect.Field;
 import java.lang.reflect.Modifier;
+import java.util.Properties;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.io.HeapSize;
 
 /**
 * Class for determining the "size" of a class, an attempt to calculate the
@@ -36,34 +37,80 @@
 public class ClassSize {
   static final Log LOG = LogFactory.getLog(ClassSize.class);
 
-  private int refSize;
-  private int minObjectSize;
+  private static int nrOfRefsPerObj = 2;
+
+  /** Array overhead */
+  public static int ARRAY = 0;
+
+  /** Overhead for ArrayList(0) */
+  public static int ARRAYLIST = 0;
+
+  /** Overhead for ByteBuffer */
+  public static int BYTE_BUFFER = 0;
+
+  /** Overhead for an Integer */
+  public static int INTEGER = 0;
+
+  /** Overhead for entry in map */
+  public static int MAP_ENTRY = 0;
+
+  /** Object overhead is minimum 2 * reference size (8 bytes on 64-bit) */
+  public static int OBJECT = 0;
+
+  /** Reference size is 8 bytes on 64-bit, 4 bytes on 32-bit */
+  public static int REFERENCE = 0;
+
+  /** String overhead */
+  public static int STRING = 0;
+
+  /** Overhead for TreeMap */
+  public static int TREEMAP = 0;
+
+  private static final String THIRTY_TWO = "32";
+
+  /**
+   * Constructor; loads the architecture-dependent sizes.
+   * @throws IOException
+   */
+  ClassSize() throws IOException {
+    reload();
+  }
 
   /**
-   * Constructor
-   * @throws Exception
+   * Method for re-reading the architecture settings
+   * @throws IOException
    */
-  public ClassSize() throws Exception{
+  public static void reload() throws IOException {
     // Figure out whether this is a 32 or 64 bit machine.
-    Runtime runtime = Runtime.getRuntime();
-    int loops = 10;
-    int sz = 0;
-    for(int i = 0; i < loops; i++) {
-      cleaner(runtime, i);
-      long memBase = runtime.totalMemory() - runtime.freeMemory();
-      Object[] junk = new Object[10000];
-      cleaner(runtime, i);
-      long memUsed = runtime.totalMemory() - runtime.freeMemory() - memBase;
-      sz = (int)((memUsed + junk.length/2)/junk.length);
-      if(sz > 0 ) {
-        break;
-      }
-    }
-    refSize = ( 4 > sz) ? 4 : sz;
-    minObjectSize = 4*refSize;
+    Properties sysProps = System.getProperties();
+    String arcModel = sysProps.getProperty("sun.arch.data.model");
+
+    //Default value is set to 8, covering the case when arcModel is unknown
+    REFERENCE = 8;
+    if (THIRTY_TWO.equals(arcModel)) {
+      REFERENCE = 4;
+    }
+
+    //OBJECT must be set before the compound constants below that use it
+    OBJECT = 2 * REFERENCE;
+
+    ARRAY = 3 * REFERENCE;
+
+    ARRAYLIST = align(OBJECT + REFERENCE + Bytes.SIZEOF_INT +
+      align(Bytes.SIZEOF_INT));
+
+    BYTE_BUFFER = align(OBJECT + REFERENCE + Bytes.SIZEOF_INT +
+      3 * Bytes.SIZEOF_BOOLEAN + 4 * Bytes.SIZEOF_INT + Bytes.SIZEOF_LONG);
+
+    INTEGER = align(OBJECT + Bytes.SIZEOF_INT);
+
+    MAP_ENTRY = align(OBJECT + 5 * REFERENCE + Bytes.SIZEOF_BOOLEAN);
+
+    TREEMAP = align(OBJECT + 2 * Bytes.SIZEOF_INT + (5 + 2) * REFERENCE +
+      align(OBJECT + Bytes.SIZEOF_INT));
+
+    STRING = align(OBJECT + REFERENCE + 3 * Bytes.SIZEOF_INT);
   }
-  
+
   /**
   * The estimate of the size of a class instance depends on whether the JVM
   * uses 32 or 64 bit addresses, that is it depends on the size of an object
@@ -78,10 +125,11 @@
   * primitives, the second the number of arrays and the third the number of
   * references.
   */
-  private int [] getSizeCoefficients(Class cl, boolean debug) {
+  private static int [] getSizeCoefficients(Class cl, boolean debug) {
     int primitives = 0;
     int arrays = 0;
-    int references = HeapSize.OBJECT / HeapSize.REFERENCE;
+    //The number of references that a new object takes
+    int references = nrOfRefsPerObj;
 
     for( ; null != cl; cl = cl.getSuperclass()) {
       Field[] field = cl.getDeclaredFields();
@@ -91,8 +139,9 @@
           Class fieldClass = field[i].getType();
          if( fieldClass.isArray()){
             arrays++;
+            references++;
           }
-          else if(! fieldClass.isPrimitive()){
+          else if(!fieldClass.isPrimitive()){
             references++;
           }
           else {// Is simple primitive
@@ -136,21 +185,21 @@
   *
   * @return the size estimate, in bytes
   */
-  private long estimateBaseFromCoefficients(int [] coeff, boolean debug) {
-    int size = coeff[0] + (coeff[1]*4 + coeff[2])*refSize;
+  private static long estimateBaseFromCoefficients(int [] coeff, boolean debug) {
+    long size = coeff[0] + align(coeff[1]*ARRAY) + coeff[2]*REFERENCE;
 
     // Round up to a multiple of 8
-    size = (int)alignSize(size);
+    size = align(size);
     if(debug) {
       if (LOG.isDebugEnabled()) {
         // Write out region name as string and its encoded name.
         LOG.debug("Primitives " + coeff[0] + ", arrays " + coeff[1] +
-          ", references(inlcuding " + HeapSize.OBJECT +
-          ", for object overhead) " + coeff[2] + ", refSize " + refSize +
+          ", references(including " + nrOfRefsPerObj +
+          ", for object overhead) " + coeff[2] + ", refSize " + REFERENCE +
           ", size " + size);
       }
     }
-    return (size < minObjectSize) ? minObjectSize : size;
+    return size;
   }
 
  /**
   *
   * @return the size estimate in bytes.
   */
@@ -162,33 +211,28 @@
-  public long estimateBase(Class cl, boolean debug) {
+  public static long estimateBase(Class cl, boolean debug) {
     return estimateBaseFromCoefficients( getSizeCoefficients(cl, debug), debug);
   }
 
   /**
-   * Tries to clear all the memory used to estimate the reference size for the
-   * current JVM
-   * @param runtime
-   * @param i
-   * @throws Exception
+   * Aligns a number to 8.
+   * @param num number to align to 8
+   * @return smallest number >= input that is a multiple of 8
   */
-  private void cleaner(Runtime runtime, int i) throws Exception{
-    Thread.sleep(i*1000);
-    runtime.gc();runtime.gc(); runtime.gc();runtime.gc();runtime.gc();
-    runtime.runFinalization();
+  public static int align(int num) {
+    return (int)(align((long)num));
   }
-
+
   /**
   * Aligns a number to 8.
   * @param num number to align to 8
   * @return smallest number >= input that is a multiple of 8
   */
-  public static long alignSize(int num) {
-    int aligned = (num + 7)/8;
-    aligned *= 8;
-    return aligned;
+  public static long align(long num) {
+    //Adding 7 and clearing the low three bits rounds num up to the next
+    //multiple of 8, the alignment unit assumed throughout this class
+    return ((num + 7) >> 3) << 3;
  }
 }
Index: src/test/org/apache/hadoop/hbase/io/TestHeapSize.java
===================================================================
--- src/test/org/apache/hadoop/hbase/io/TestHeapSize.java	(revision 788096)
+++ src/test/org/apache/hadoop/hbase/io/TestHeapSize.java	(working copy)
@@ -1,5 +1,7 @@
 package org.apache.hadoop.hbase.io;
 
+import java.io.IOException;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.KeyValue;
@@ -24,47 +26,46 @@
   * Testing the classes that implements HeapSize and are a part of 0.20.
   * Some are not tested here for example BlockIndex which is tested in
   * TestHFile since it is a non public class
+   * @throws IOException
   */
-  public void testSizes() {
-    ClassSize cs = null;
+  public void testSizes() throws IOException {
+    ClassSize.reload();
     Class cl = null;
     long expected = 0L;
     long actual = 0L;
-    try {
-      cs = new ClassSize();
-    } catch(Exception e) {}
 
     //KeyValue
     cl = KeyValue.class;
-    expected = cs.estimateBase(cl, false);
+    expected = ClassSize.estimateBase(cl, false);
+
     KeyValue kv = new KeyValue();
     actual = kv.heapSize();
     if(expected != actual) {
-      cs.estimateBase(cl, true);
+      ClassSize.estimateBase(cl, true);
       assertEquals(expected, actual);
     }
 
     //LruBlockCache
     cl = LruBlockCache.class;
-    expected = cs.estimateBase(cl, false);
+    expected = ClassSize.estimateBase(cl, false);
     LruBlockCache c = new LruBlockCache(1,1,200);
-    //Since minimum size for the for a LruBlockCache is 1
-    //we need to remove one reference from the heapsize
-    actual = c.heapSize() - HeapSize.REFERENCE;
+    actual = c.heapSize();
     if(expected != actual) {
-      cs.estimateBase(cl, true);
+      ClassSize.estimateBase(cl, true);
       assertEquals(expected, actual);
     }
 
     //Put
     cl = Put.class;
-    expected = cs.estimateBase(cl, false);
+    expected = ClassSize.estimateBase(cl, false);
     //The actual TreeMap is not included in the above calculation
-    expected += HeapSize.TREEMAP_SIZE;
+    expected += ClassSize.TREEMAP;
     Put put = new Put(Bytes.toBytes(""));
     actual = put.heapSize();
     if(expected != actual) {
-      cs.estimateBase(cl, true);
+      ClassSize.estimateBase(cl, true);
       assertEquals(expected, actual);
     }
   }
Index: src/test/org/apache/hadoop/hbase/io/hfile/TestHFile.java
===================================================================
--- src/test/org/apache/hadoop/hbase/io/hfile/TestHFile.java	(revision 788096)
+++ src/test/org/apache/hadoop/hbase/io/hfile/TestHFile.java	(working copy)
@@ -251,25 +251,22 @@
   /**
   * Checks if the HeapSize calculator is within reason
   */
-  public void testHeapSizeForBlockIndex() {
-    ClassSize cs = null;
+  public void testHeapSizeForBlockIndex() throws IOException {
+    ClassSize.reload();
     Class cl = null;
     long expected = 0L;
     long actual = 0L;
-    try {
-      cs = new ClassSize();
-    } catch(Exception e) {}
-
-    //KeyValue
+
     cl = BlockIndex.class;
-    expected = cs.estimateBase(cl, false);
+    expected = ClassSize.estimateBase(cl, false);
     BlockIndex bi = new BlockIndex(Bytes.BYTES_RAWCOMPARATOR);
     actual = bi.heapSize();
-    //Since we have a [[]] in BlockIndex and the checker only sees the [] we
-    // miss a MULTI_ARRAY which is 4*Reference = 32 B
-    actual -= 32;
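The shift trick in align(long) rounds up to the next multiple of 8, and reload() keys REFERENCE off the sun.arch.data.model system property. A quick sketch of both behaviours; the property is a Sun/Oracle JVM convention and may be absent elsewhere:

// Quick check of the align() arithmetic and of the data-model probe.
public class AlignSketch {
  static long align(long num) { return ((num + 7) >> 3) << 3; }

  public static void main(String[] args) {
    System.out.println(align(0));   // 0
    System.out.println(align(1));   // 8
    System.out.println(align(13));  // 16
    System.out.println(align(16));  // 16, multiples of 8 are unchanged
    // "32" on a 32-bit JVM, "64" on a 64-bit one; may be null elsewhere
    System.out.println(System.getProperty("sun.arch.data.model"));
  }
}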
+    //Since the arrays in BlockIndex (byte [][] blockKeys, long [] blockOffsets,
+    //int [] blockDataSizes) are all null they are not going to show up in the
+    //HeapSize calculation, so we need to remove those array costs from expected.
+    expected -= ClassSize.align(3 * ClassSize.ARRAY);
     if(expected != actual) {
-      cs.estimateBase(cl, true);
+      ClassSize.estimateBase(cl, true);
       assertEquals(expected, actual);
     }
   }