Index: oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/segment/SegmentDataStoreBlobGCTest.java
IDEA additional info:
Subsystem: com.intellij.openapi.diff.impl.patch.CharsetEP
<+>UTF-8
===================================================================
--- oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/segment/SegmentDataStoreBlobGCTest.java	(date 1423804879000)
+++ oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/segment/SegmentDataStoreBlobGCTest.java	(revision )
@@ -31,6 +31,7 @@
 import java.util.Random;
 import java.util.Set;
 import java.util.concurrent.Callable;
+import java.util.concurrent.TimeUnit;
 
 import com.google.common.collect.Lists;
 import com.google.common.collect.Sets;
@@ -53,6 +54,8 @@
 import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
 import org.junit.After;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import javax.annotation.Nonnull;
 
@@ -60,6 +63,9 @@
  * Tests for SegmentNodeStore DataStore GC
  */
 public class SegmentDataStoreBlobGCTest {
+    private static final Logger log = LoggerFactory
+        .getLogger(SegmentDataStoreBlobGCTest.class);
+
     SegmentNodeStore nodeStore;
     FileStore store;
     DataStoreBlobStore blobStore;
@@ -69,7 +75,7 @@
         store = new FileStore(blobStore, getWorkDir(), 256, false);
 
         CompactionStrategy compactionStrategy = new CompactionStrategy(false, true,
-            CompactionStrategy.CleanupType.CLEAN_OLD, 0, CompactionStrategy.MEMORY_THRESHOLD_DEFAULT) {
+            CompactionStrategy.CleanupType.CLEAN_OLD, 5, CompactionStrategy.MEMORY_THRESHOLD_DEFAULT) {
             @Override
             public boolean compacted(@Nonnull Callable<Boolean> setHead) throws Exception {
                 return setHead.call();
@@ -105,8 +111,11 @@
                 processed.add(n);
             }
         }
+
+        List<String> createdBlobs = Lists.newArrayList();
         for (int i = 0; i < number; i++) {
             SegmentBlob b = (SegmentBlob) nodeStore.createBlob(randomStream(i, 16516));
+            createdBlobs.add(b.getBlobId());
             if (!processed.contains(i)) {
                 Iterator<String> idIter = blobStore
                     .resolveChunks(b.getBlobId());
@@ -117,10 +126,15 @@
             a.child("c" + i).setProperty("x", b);
         }
         nodeStore.merge(a, EmptyHook.INSTANCE, CommitInfo.EMPTY);
+        log.info("Created blobs : {}", createdBlobs.size());
 
         for (int id : processed) {
             delete("c" + id);
         }
+        log.info("Deleted nodes : {}", processed.size());
+
+        // Sleep a little to make eligible for cleanup
+        TimeUnit.MILLISECONDS.sleep(25);
 
         store.compact();
         return set;
@@ -152,6 +166,9 @@
         gc.collectGarbage(false);
 
         Set<String> existingAfterGC = iterate();
+        log.info("{} blobs that should have remained after gc : {}", remaining.size(), remaining);
+        log.info("{} blobs existing after gc : {}", existingAfterGC.size(), existingAfterGC);
+
         assertTrue(Sets.symmetricDifference(remaining, existingAfterGC).isEmpty());
     }
 