Index: oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/segment/PartialCompactionMapTest.java
===================================================================
--- oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/segment/PartialCompactionMapTest.java (revision 1702001)
+++ oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/segment/PartialCompactionMapTest.java (working copy)
@@ -22,9 +22,10 @@
import static com.google.common.collect.Iterables.get;
import static com.google.common.collect.Lists.newArrayList;
import static com.google.common.collect.Maps.newHashMap;
-import static com.google.inject.internal.util.$Sets.newHashSet;
+import static com.google.common.collect.Sets.newHashSet;
import static java.io.File.createTempFile;
import static junit.framework.Assert.assertTrue;
+import static org.apache.commons.io.FileUtils.deleteDirectory;
import static org.apache.jackrabbit.oak.commons.IOUtils.humanReadableByteCount;
import static org.apache.jackrabbit.oak.commons.benchmark.MicroBenchmark.run;
import static org.apache.jackrabbit.oak.plugins.segment.Segment.MAX_SEGMENT_SIZE;
@@ -49,7 +50,6 @@
import java.util.UUID;
import com.google.common.collect.ImmutableList;
-import com.google.common.collect.Sets;
import org.apache.jackrabbit.oak.commons.benchmark.MicroBenchmark.Benchmark;
import org.apache.jackrabbit.oak.plugins.segment.file.FileStore;
import org.junit.After;
@@ -58,9 +58,16 @@
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
+/**
+ *
+ * This is a unit test + benchmark test for all the compaction map implementations.
+ *
+ * The benchmark tests are disabled by default; to run them you need to set the specific {@code benchmark.*} system property:
+ * {@code mvn test -Dtest.opts.memory=-Xmx4G -Dmaven.test.redirectTestOutputToFile=true -Dtest=PartialCompactionMapTest -Dbenchmark=true -DbenchLargeMap.count=25 }
+ *
+ *
+ */
@RunWith(Parameterized.class)
public class PartialCompactionMapTest {
- private static final boolean BENCH = Boolean.getBoolean("benchmark");
private static final int SEED = Integer.getInteger("SEED", new Random().nextInt());
private final Random rnd = new Random(SEED);
@@ -93,7 +100,11 @@
@After
public void tearDown() {
segmentStore.close();
- directory.delete();
+ try {
+ deleteDirectory(directory);
+ } catch (IOException e) {
+ // ignored: best-effort cleanup of the temporary test directory
+ }
}
private SegmentTracker getTracker() {
@@ -189,7 +200,7 @@
assertEquals(after1, map.get(before1));
assertEquals(after2, map.get(before2));
- map.remove(Sets.newHashSet(before1.asUUID()));
+ map.remove(newHashSet(before1.asUUID()));
assertNull(map.get(before1));
assertNull(map.get(before2));
assertEquals(0, map.getRecordCount());
@@ -232,20 +243,21 @@
@Test
public void benchLargeMap() {
- assumeTrue(BENCH);
+ assumeTrue(Boolean.getBoolean("benchmark.benchLargeMap"));
assertHeapSize(1000000000);
map = createCompactionMap();
// check the memory use of really large mappings, 1M compacted segments with 10 records each.
Runtime runtime = Runtime.getRuntime();
- for (int i = 0; i < 1000; i++) {
+ for (int i = 0; i < Integer.getInteger("benchmark.benchLargeMap.count", 1000); i++) {
Map ids = randomRecordIdMap(rnd, getTracker(), 10000, 100);
long start = System.nanoTime();
for (Entry entry : ids.entrySet()) {
map.put(entry.getKey(), entry.getValue());
}
System.out.println(
+ "Bench Large Map #" +
(i + 1) + ": " + (runtime.totalMemory() - runtime.freeMemory()) /
(1024 * 1024) + "MB, " + (System.nanoTime() - start) / 1000000 + "ms");
}
@@ -253,7 +265,7 @@
@Test
public void benchPut() throws Exception {
- assumeTrue(BENCH);
+ assumeTrue(Boolean.getBoolean("benchmark.benchPut"));
assertHeapSize(4000000000L);
run(new PutBenchmark(0, 100));
@@ -267,7 +279,7 @@
@Test
public void benchGet() throws Exception {
- assumeTrue(BENCH);
+ assumeTrue(Boolean.getBoolean("benchmark.benchGet"));
assertHeapSize(4000000000L);
run(new GetBenchmark(0, 100));