diff --git a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/directory/ActiveDeletedBlobCollectorFactory.java b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/directory/ActiveDeletedBlobCollectorFactory.java
index 184e4c7f8f..611df9cbb2 100644
--- a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/directory/ActiveDeletedBlobCollectorFactory.java
+++ b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/directory/ActiveDeletedBlobCollectorFactory.java
@@ -18,6 +18,7 @@ package org.apache.jackrabbit.oak.plugins.index.lucene.directory;
 
 import com.google.common.base.Joiner;
 import com.google.common.collect.Lists;
+import org.apache.commons.io.Charsets;
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.io.filefilter.IOFileFilter;
 import org.apache.commons.io.filefilter.RegexFileFilter;
@@ -148,7 +149,7 @@ public class ActiveDeletedBlobCollectorFactory {
             this.clock = clock;
             this.rootDirectory = rootDirectory;
             this.executorService = executorService;
-            this.deletedBlobs = new ArrayBlockingQueue<>(1000); //1000 items should be ok for async index commits
+            this.deletedBlobs = new ArrayBlockingQueue<>(10000); //10000 items should be ok for async index commits
             this.deletedBlobsFileWriter = new DeletedBlobsFileWriter();
         }
 
@@ -232,6 +233,10 @@ public class ActiveDeletedBlobCollectorFactory {
 
         private long readLastCheckedBlobTimestamp() {
             File blobCollectorInfoFile = new File(rootDirectory, "collection-info.txt");
+            if (!blobCollectorInfoFile.exists()) {
+                LOG.info("Couldn't read last checked blob timestamp (file not found)... would do a bit more scan");
+                return -1;
+            }
             InputStream is = null;
             Properties p;
             try {
@@ -239,6 +244,7 @@ public class ActiveDeletedBlobCollectorFactory {
                 p = new Properties();
                 p.load(is);
             } catch (IOException e) {
+                LOG.debug("IOException reading file " + blobCollectorInfoFile, e);
                 LOG.info("Couldn't read last checked blob timestamp... would do a bit more scan");
                 return -1;
             } finally {
@@ -292,16 +298,20 @@ public class ActiveDeletedBlobCollectorFactory {
         }
 
         private void addDeletedBlobs(Collection<BlobIdInfoStruct> deletedBlobs) {
+            //count only ids actually enqueued; a timed-out offer drops the id
+            int addedForFlush = 0;
             for (BlobIdInfoStruct info : deletedBlobs) {
                 try {
-                    if (!this.deletedBlobs.offer(info, 1, TimeUnit.SECONDS)) {
-                        LOG.warn("Timed out while offer-ing {} into queue.", info);
-                    }
+                    if (this.deletedBlobs.offer(info, 1, TimeUnit.SECONDS)) {
+                        addedForFlush++;
+                    } else {
+                        LOG.warn("Timed out while offer-ing {} into queue.", info);
+                    }
                 } catch (InterruptedException e) {
                     LOG.warn("Interrupted while adding " + info, e);
                 }
             }
-            LOG.debug("Added {} to be flushed", deletedBlobs.size());
+            LOG.debug("Added {} (out of {} tried) to be flushed. QSize: {}", addedForFlush, deletedBlobs.size(), this.deletedBlobs.size());
             deletedBlobsFileWriter.scheduleFileFlushIfNeeded();
         }
 
@@ -318,12 +328,19 @@
                 if (localDeletedBlobs.size() > 0) {
                     File outFile = new File(rootDirectory, getBlobFileName());
                     try {
+                        if (LOG.isDebugEnabled()) {
+                            LOG.debug("NumLines before: {}", outFile.exists()?FileUtils.readLines(outFile, Charsets.toCharset((String)null)).size():0);
+                        }
                         long start = PERF_LOG.start();
                         FileUtils.writeLines(outFile, localDeletedBlobs, true);
                         PERF_LOG.end(start, 1, "Flushing deleted blobs");
+                        if (LOG.isDebugEnabled()) {
+                            LOG.debug("NumLines after: {}", FileUtils.readLines(outFile, Charsets.toCharset((String)null)).size());
+                        }
+                        LOG.debug("Flushed {} blobs to {}", localDeletedBlobs.size(), outFile.getName());
                     } catch (IOException e) {
                         LOG.error("Couldn't write out to " + outFile, e);
                     }
                 }
             }
 
diff --git a/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/directory/ActiveDeletedBlobCollectorTest.java b/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/directory/ActiveDeletedBlobCollectorTest.java
index 47cd797a8d..904e718f77 100644
--- a/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/directory/ActiveDeletedBlobCollectorTest.java
+++ b/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/directory/ActiveDeletedBlobCollectorTest.java
@@ -32,6 +32,8 @@ import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.TemporaryFolder;
 import org.mockito.internal.util.collections.Sets;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import javax.annotation.Nonnull;
 import java.io.File;
@@ -43,6 +45,7 @@ import java.nio.file.Path;
 import java.nio.file.attribute.PosixFilePermission;
 import java.util.ArrayList;
 import java.util.Collections;
+import java.util.HashSet;
 import java.util.Iterator;
 import java.util.List;
 import java.util.concurrent.ExecutorService;
@@ -55,10 +58,14 @@ import static org.apache.jackrabbit.oak.plugins.index.IndexCommitCallback.IndexP
 import static org.apache.jackrabbit.oak.plugins.index.IndexCommitCallback.IndexProgress.COMMIT_SUCCEDED;
 import static org.hamcrest.Matchers.containsInAnyOrder;
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertThat;
+import static org.junit.Assert.assertTrue;
 import static org.junit.Assume.assumeTrue;
 
 public class ActiveDeletedBlobCollectorTest {
+    private static final Logger LOG = LoggerFactory.getLogger(ActiveDeletedBlobCollectorTest.class);
+
     @Rule
     public TemporaryFolder blobCollectionRoot = new TemporaryFolder(new File("target"));
 
@@ -169,13 +176,13 @@ public class ActiveDeletedBlobCollectorTest {
     @Ignore("OAK-6314")
     @Test
     public void multiThreadedCommits() throws Exception {
-        clock = Clock.SIMPLE;
         ExecutorService executorService = Executors.newFixedThreadPool(3);
-        adbc = ActiveDeletedBlobCollectorFactory.newInstance(
-                new File(blobCollectionRoot.getRoot(), "b"), executorService);
+        File rootDirectory = new File(blobCollectionRoot.getRoot(), "b");
+        FileUtils.forceMkdir(rootDirectory);
+        adbc = new ActiveDeletedBlobCollectorImpl(clock, rootDirectory, executorService);
 
-        int numThreads = 4;
-        int numBlobsPerThread = 500;
+        int numThreads = 10;
+        int numBlobsPerThread = 200;
 
         List<Thread> threads = new ArrayList<>(numThreads);
         final AtomicInteger threadIndex = new AtomicInteger(0);
@@ -214,10 +221,8 @@
             t.join();
         }
 
-        // Push one more commit to flush out any remaining ones
-        adbc.getBlobDeletionCallback().commitProgress(COMMIT_SUCCEDED);
-
-        executorService.awaitTermination(100, TimeUnit.MILLISECONDS);
+        boolean terminated = executorService.awaitTermination(100, TimeUnit.MILLISECONDS);
+        assertFalse(terminated);
 
         List<String> deletedChunks = new ArrayList<>(numThreads*numBlobsPerThread*2);
         for (int threadNum = 0; threadNum < numThreads; threadNum++) {
@@ -227,11 +232,40 @@
             }
         }
 
-        adbc.purgeBlobsDeleted(clock.getTimeIncreasing(), blobStore);
+        long until = Clock.SIMPLE.getTime() + TimeUnit.SECONDS.toMillis(3);
+        List<String> markerChunks = Lists.newArrayList();
+        int i = 0;
+        while (Clock.SIMPLE.getTime() < until) {
+            // Push commit with a marker blob-id and wait for it to be purged
+            BlobDeletionCallback bdc = adbc.getBlobDeletionCallback();
+            String markerBlobId = "MARKER-" + (i++);
+            bdc.deleted(markerBlobId, Lists.newArrayList(markerBlobId));
+            bdc.commitProgress(COMMIT_SUCCEDED);
+
+            Iterators.addAll(markerChunks, blobStore.resolveChunks(markerBlobId));
+
+            Thread.sleep(500);
+
+            clock.waitUntil(clock.getTime() + TimeUnit.SECONDS.toMillis(5));
+            adbc.purgeBlobsDeleted(clock.getTimeIncreasing(), blobStore);
+            Thread.yield();
+
+            if (blobStore.markerChunkDeleted) {
+                break;
+            }
+        }
+
+        assertTrue("Timed out while waiting for marker chunk to be purged", blobStore.markerChunkDeleted);
+
+        blobStore.deletedChunkIds.removeAll(markerChunks);
+
+        HashSet<String> missingChunks = new HashSet<>(deletedChunks);
+        missingChunks.removeAll(blobStore.deletedChunkIds);
+        assertTrue("size: " + missingChunks.size() + "; list: " + missingChunks.toString(), missingChunks.isEmpty());
 
         assertThat(blobStore.deletedChunkIds, containsInAnyOrder(deletedChunks.toArray()));
     }
 
     @Test
     public void inaccessibleWorkDirGivesNoop() throws Exception {
         assumeNotWindows();
@@ -271,6 +305,7 @@ public class ActiveDeletedBlobCollectorTest {
 
     class ChunkDeletionTrackingBlobStore implements GarbageCollectableBlobStore {
         List<String> deletedChunkIds = Lists.newArrayList();
+        volatile boolean markerChunkDeleted = false;
 
         @Override
         public String writeBlob(InputStream in) throws IOException {
@@ -350,15 +385,28 @@
         @Override
         public boolean deleteChunks(List<String> chunkIds, long maxLastModifiedTime) throws Exception {
             deletedChunkIds.addAll(chunkIds);
+            setMarkerChunkDeletedFlag(chunkIds);
             return true;
         }
 
         @Override
         public long countDeleteChunks(List<String> chunkIds, long maxLastModifiedTime) throws Exception {
             deletedChunkIds.addAll(chunkIds);
+            setMarkerChunkDeletedFlag(chunkIds);
             return chunkIds.size();
         }
 
+        private void setMarkerChunkDeletedFlag(List<String> chunkIds) {
+            if (!markerChunkDeleted) {
+                for (String chunkId : chunkIds) {
+                    if (chunkId.startsWith("MARKER")) {
+                        markerChunkDeleted = true;
+                        break;
+                    }
+                }
+            }
+        }
+
         @Override
         public Iterator<String> resolveChunks(String blobId) throws IOException {
             return Iterators.forArray(blobId + "-1", blobId + "-2");
diff --git a/oak-lucene/src/test/resources/logback-test.xml b/oak-lucene/src/test/resources/logback-test.xml
index e92401c93e..f02da443ad 100644
--- a/oak-lucene/src/test/resources/logback-test.xml
+++ b/oak-lucene/src/test/resources/logback-test.xml
@@ -29,6 +29,8 @@
         </encoder>
     </appender>
 
+    <logger name="org.apache.jackrabbit.oak.plugins.index.lucene.directory" level="DEBUG"/>
+
     <root level="INFO">
         <!--
         <appender-ref ref="console"/>
