Index: main/java/org/apache/hadoop/hbase/util/ByteBloomFilter.java =================================================================== --- main/java/org/apache/hadoop/hbase/util/ByteBloomFilter.java (revision 1450030) +++ main/java/org/apache/hadoop/hbase/util/ByteBloomFilter.java (working copy) @@ -110,6 +110,8 @@ * estimate the ideal false positive rate. */ private static Random randomGeneratorForTest; + + private final static int MIN_BLOOMFILTER_SIZE = 1000; // 1000 bytes /** Bit-value lookup array to prevent doing the same work over and over */ private static final byte [] bitvals = { @@ -500,7 +502,8 @@ int newMaxKeys = this.maxKeys; // while exponentially smaller & folding is lossless - while ( (newByteSize & 1) == 0 && newMaxKeys > (this.keyCount<<1) ) { + while ( (newByteSize & 1) == 0 && newMaxKeys > (this.keyCount<<1) + && newByteSize >= MIN_BLOOMFILTER_SIZE * 2) { pieces <<= 1; newByteSize >>= 1; newMaxKeys >>= 1; Index: test/java/org/apache/hadoop/hbase/util/TestByteBloomFilter.java =================================================================== --- test/java/org/apache/hadoop/hbase/util/TestByteBloomFilter.java (revision 1450030) +++ test/java/org/apache/hadoop/hbase/util/TestByteBloomFilter.java (working copy) @@ -80,16 +80,19 @@ public void testBloomFold() throws Exception { // test: foldFactor < log(max/actual) - ByteBloomFilter b = new ByteBloomFilter(1003, (float) 0.01, + ByteBloomFilter b = new ByteBloomFilter(3416, (float) 0.01, Hash.MURMUR_HASH, 2); b.allocBloom(); long origSize = b.getByteSize(); - assertEquals(1204, origSize); + assertEquals(4096, origSize); for (int i = 0; i < 12; ++i) { b.add(Bytes.toBytes(i)); } + b.compactBloom(); - assertEquals(origSize>>2, b.getByteSize()); + // The min size after compaction will still be >= 1000 + // because compaction has a lower limit of 1000 + assertEquals(1024, b.getByteSize()); int falsePositives = 0; for (int i = 0; i < 25; ++i) { if (b.contains(Bytes.toBytes(i))) {