Index: src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
===================================================================
--- src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java	(revision 1082707)
+++ src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java	(working copy)
@@ -2793,13 +2793,13 @@
     int num_unique_rows = 10;
     int duplicate_multiplier =2;
     int num_storefiles = 4;
 
     int version = 0;
     for (int f =0 ; f < num_storefiles; f++) {
       for (int i = 0; i < duplicate_multiplier; i ++) {
         for (int j = 0; j < num_unique_rows; j++) {
           Put put = new Put(Bytes.toBytes("row" + j));
-          put.add(fam1, qf1, version++, val1);
+          put.add(fam1, qf1, version++, val1);
           region.put(put);
         }
       }
@@ -2812,7 +2812,7 @@
       StoreFile.Reader reader = storefile.getReader();
       reader.loadFileInfo();
       reader.loadBloomfilter();
-      assertEquals(num_unique_rows*duplicate_multiplier, reader.getEntries());
+      assertEquals(num_unique_rows * duplicate_multiplier, reader.getEntries());
       assertEquals(num_unique_rows, reader.getFilterEntries());
     }
 
@@ -2824,9 +2824,9 @@
       StoreFile.Reader reader = storefile.getReader();
       reader.loadFileInfo();
       reader.loadBloomfilter();
-      assertEquals(num_unique_rows*duplicate_multiplier*num_storefiles,
-          reader.getEntries());
-      assertEquals(num_unique_rows, reader.getFilterEntries());
+      assertEquals(num_unique_rows * duplicate_multiplier * num_storefiles,
+          reader.getEntries());
+      assertEquals(num_unique_rows, reader.getFilterEntries());
     }
   }
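
Note on the asserted counts: each flush writes `duplicate_multiplier` passes over `num_unique_rows` rows, and `version++` gives every duplicate a distinct timestamp, so a single store file holds 10 * 2 = 20 cells while its Bloom filter only records the 10 distinct keys; after compaction the merged file holds 10 * 2 * 4 = 80 cells and still 10 Bloom entries (assuming, as the test sets up elsewhere, that the family retains all versions). A minimal standalone sketch of that arithmetic follows; it is an illustration, not part of the patch, the class name is made up, and the variable names simply mirror the test's locals.

    public class BloomEntryCounts {
      public static void main(String[] args) {
        int num_unique_rows = 10;      // distinct row keys written per pass
        int duplicate_multiplier = 2;  // passes per store file (same rows, new timestamps via version++)
        int num_storefiles = 4;        // flushes before compaction

        // Per store file: every duplicate is a separate cell because its timestamp differs,
        // but the Bloom filter only tracks distinct keys.
        int entriesPerFile = num_unique_rows * duplicate_multiplier;   // 20
        int bloomEntriesPerFile = num_unique_rows;                     // 10

        // After compaction all store files merge into one; no cells are dropped
        // (assumes the family keeps all versions), and the distinct-key count is unchanged.
        int entriesAfterCompaction = entriesPerFile * num_storefiles;  // 80
        int bloomEntriesAfterCompaction = num_unique_rows;             // 10

        System.out.println(entriesPerFile + " cells / " + bloomEntriesPerFile + " bloom keys per file");
        System.out.println(entriesAfterCompaction + " cells / " + bloomEntriesAfterCompaction
            + " bloom keys after compaction");
      }
    }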