Index: git/jackrabbit-oak/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/LuceneIndexEditorProvider.java
IDEA additional info:
Subsystem: com.intellij.openapi.diff.impl.patch.CharsetEP
<+>UTF-8
===================================================================
--- git/jackrabbit-oak/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/LuceneIndexEditorProvider.java	(revision 994e761ce6a39a5b8083f4712f1b8d10dff08d6b)
+++ git/jackrabbit-oak/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/LuceneIndexEditorProvider.java	(revision )
@@ -29,6 +29,7 @@
 import org.apache.jackrabbit.oak.plugins.index.lucene.hybrid.LuceneDocumentHolder;
 import org.apache.jackrabbit.oak.plugins.index.lucene.writer.DefaultIndexWriterFactory;
 import org.apache.jackrabbit.oak.plugins.index.lucene.writer.LuceneIndexWriterFactory;
+import org.apache.jackrabbit.oak.spi.blob.GarbageCollectableBlobStore;
 import org.apache.jackrabbit.oak.spi.commit.CommitContext;
 import org.apache.jackrabbit.oak.spi.commit.Editor;
 import org.apache.jackrabbit.oak.spi.mount.MountInfoProvider;
@@ -55,8 +56,10 @@
     private final IndexCopier indexCopier;
     private final ExtractedTextCache extractedTextCache;
     private final IndexAugmentorFactory augmentorFactory;
-    private final LuceneIndexWriterFactory indexWriterFactory;
+    private LuceneIndexWriterFactory indexWriterFactory;
     private final IndexTracker indexTracker;
+    private final MountInfoProvider mountInfoProvider;
+    private GarbageCollectableBlobStore blobStore;
 
     /**
      * Number of indexed Lucene document that can be held in memory
@@ -95,7 +98,7 @@
         this.indexTracker = indexTracker;
         this.extractedTextCache = extractedTextCache != null ? extractedTextCache : new ExtractedTextCache(0, 0);
         this.augmentorFactory = augmentorFactory;
-        this.indexWriterFactory = new DefaultIndexWriterFactory(checkNotNull(mountInfoProvider), indexCopier);
+        this.mountInfoProvider = checkNotNull(mountInfoProvider);
     }
 
     @Override
@@ -107,6 +110,7 @@
             checkArgument(callback instanceof ContextAwareCallback, "callback instance not of type " +
                     "ContextAwareCallback [%s]", callback);
             IndexingContext indexingContext = ((ContextAwareCallback)callback).getIndexingContext();
+            indexWriterFactory = new DefaultIndexWriterFactory(mountInfoProvider, indexCopier, blobStore);
             LuceneIndexWriterFactory writerFactory = indexWriterFactory;
             IndexDefinition indexDefinition = null;
             boolean asyncIndexing = true;
@@ -178,8 +182,15 @@
         return holder;
     }
 
+    public void setBlobStore(@Nullable GarbageCollectableBlobStore blobStore) {
+        this.blobStore = blobStore;
+    }
+
+    GarbageCollectableBlobStore getBlobStore() {
+        return blobStore;
+    }
+
     private static CommitContext getCommitContext(IndexingContext indexingContext) {
         return (CommitContext) indexingContext.getCommitInfo().getInfo().get(CommitContext.NAME);
     }
-
 }
Index: git/jackrabbit-oak/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/writer/MultiplexingIndexWriter.java
IDEA additional info:
Subsystem: com.intellij.openapi.diff.impl.patch.CharsetEP
<+>UTF-8
===================================================================
--- git/jackrabbit-oak/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/writer/MultiplexingIndexWriter.java	(revision 994e761ce6a39a5b8083f4712f1b8d10dff08d6b)
+++ git/jackrabbit-oak/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/writer/MultiplexingIndexWriter.java	(revision )
@@ -22,9 +22,12 @@
 import java.io.IOException;
 import java.util.Map;
 
+import javax.annotation.Nullable;
+
 import com.google.common.collect.Maps;
 import org.apache.jackrabbit.oak.plugins.index.lucene.IndexCopier;
 import org.apache.jackrabbit.oak.plugins.index.lucene.IndexDefinition;
+import org.apache.jackrabbit.oak.spi.blob.GarbageCollectableBlobStore;
 import org.apache.jackrabbit.oak.spi.mount.Mount;
 import org.apache.jackrabbit.oak.spi.mount.MountInfoProvider;
 import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
@@ -36,16 +39,19 @@
     private final IndexDefinition definition;
     private final NodeBuilder definitionBuilder;
     private final boolean reindex;
+    private GarbageCollectableBlobStore blobStore;
 
     private final Map<Mount, DefaultIndexWriter> writers = Maps.newHashMap();
 
     public MultiplexingIndexWriter(IndexCopier indexCopier, MountInfoProvider mountInfoProvider,
-                                   IndexDefinition definition, NodeBuilder definitionBuilder, boolean reindex) {
+                                   IndexDefinition definition, NodeBuilder definitionBuilder,
+                                   boolean reindex, @Nullable GarbageCollectableBlobStore blobStore) {
         this.indexCopier = indexCopier;
         this.mountInfoProvider = mountInfoProvider;
         this.definition = definition;
         this.definitionBuilder = definitionBuilder;
         this.reindex = reindex;
+        this.blobStore = blobStore;
     }
 
     @Override
@@ -94,6 +100,7 @@
     private DefaultIndexWriter createWriter(Mount m) {
         String dirName = MultiplexersLucene.getIndexDirName(m);
         String suggestDirName = MultiplexersLucene.getSuggestDirName(m);
-        return new DefaultIndexWriter(definition, definitionBuilder, indexCopier, dirName, suggestDirName, reindex);
+        return new DefaultIndexWriter(definition, definitionBuilder, indexCopier, dirName,
+            suggestDirName, reindex, blobStore);
     }
 }
Index: git/jackrabbit-oak/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/writer/DefaultIndexWriterFactory.java
IDEA additional info:
Subsystem: com.intellij.openapi.diff.impl.patch.CharsetEP
<+>UTF-8
===================================================================
--- git/jackrabbit-oak/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/writer/DefaultIndexWriterFactory.java	(revision 994e761ce6a39a5b8083f4712f1b8d10dff08d6b)
+++ git/jackrabbit-oak/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/writer/DefaultIndexWriterFactory.java	(revision )
@@ -24,25 +24,31 @@
 import org.apache.jackrabbit.oak.plugins.index.lucene.IndexCopier;
 import org.apache.jackrabbit.oak.plugins.index.lucene.IndexDefinition;
 import org.apache.jackrabbit.oak.plugins.index.lucene.LuceneIndexConstants;
+import org.apache.jackrabbit.oak.spi.blob.GarbageCollectableBlobStore;
 import org.apache.jackrabbit.oak.spi.mount.MountInfoProvider;
 import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
 
 public class DefaultIndexWriterFactory implements LuceneIndexWriterFactory {
     private final MountInfoProvider mountInfoProvider;
     private final IndexCopier indexCopier;
+    private final GarbageCollectableBlobStore blobStore;
 
-    public DefaultIndexWriterFactory(MountInfoProvider mountInfoProvider, @Nullable IndexCopier indexCopier) {
+    public DefaultIndexWriterFactory(MountInfoProvider mountInfoProvider,
+        @Nullable IndexCopier indexCopier, @Nullable GarbageCollectableBlobStore blobStore) {
         this.mountInfoProvider = mountInfoProvider;
         this.indexCopier = indexCopier;
+        this.blobStore = blobStore;
     }
 
     @Override
     public LuceneIndexWriter newInstance(IndexDefinition definition,
                                          NodeBuilder definitionBuilder, boolean reindex) {
         if (mountInfoProvider.hasNonDefaultMounts()){
-            return new MultiplexingIndexWriter(indexCopier, mountInfoProvider, definition, definitionBuilder, reindex);
+            return new MultiplexingIndexWriter(indexCopier, mountInfoProvider, definition,
+                definitionBuilder, reindex, blobStore);
         }
         return new DefaultIndexWriter(definition, definitionBuilder, indexCopier,
-                LuceneIndexConstants.INDEX_DATA_CHILD_NAME, LuceneIndexConstants.SUGGEST_DATA_CHILD_NAME, reindex);
+            LuceneIndexConstants.INDEX_DATA_CHILD_NAME,
+            LuceneIndexConstants.SUGGEST_DATA_CHILD_NAME, reindex, blobStore);
     }
 }
Index: git/jackrabbit-oak/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/LuceneIndexTest.java
IDEA additional info:
Subsystem: com.intellij.openapi.diff.impl.patch.CharsetEP
<+>UTF-8
===================================================================
--- git/jackrabbit-oak/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/LuceneIndexTest.java	(revision 994e761ce6a39a5b8083f4712f1b8d10dff08d6b)
+++ git/jackrabbit-oak/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/LuceneIndexTest.java	(revision )
@@ -372,13 +372,13 @@
     }
 
     private void purgeDeletedDocs(NodeBuilder idx, IndexDefinition definition) throws IOException {
-        IndexWriter writer = new IndexWriter(newIndexDirectory(definition, idx, LuceneIndexConstants.INDEX_DATA_CHILD_NAME), getIndexWriterConfig(definition, true));
+        IndexWriter writer = new IndexWriter(newIndexDirectory(definition, idx, LuceneIndexConstants.INDEX_DATA_CHILD_NAME, null), getIndexWriterConfig(definition, true));
         writer.forceMergeDeletes();
         writer.close();
     }
 
     public int getDeletedDocCount(NodeBuilder idx, IndexDefinition definition) throws IOException {
-        IndexReader reader = DirectoryReader.open(newIndexDirectory(definition, idx, LuceneIndexConstants.INDEX_DATA_CHILD_NAME));
+        IndexReader reader = DirectoryReader.open(newIndexDirectory(definition, idx, LuceneIndexConstants.INDEX_DATA_CHILD_NAME, null));
         int numDeletes = reader.numDeletedDocs();
         reader.close();
         return numDeletes;
Index: git/jackrabbit-oak/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/writer/MultiplexingIndexWriterTest.java
IDEA additional info:
Subsystem: com.intellij.openapi.diff.impl.patch.CharsetEP
<+>UTF-8
===================================================================
--- git/jackrabbit-oak/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/writer/MultiplexingIndexWriterTest.java	(revision 994e761ce6a39a5b8083f4712f1b8d10dff08d6b)
+++ git/jackrabbit-oak/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/writer/MultiplexingIndexWriterTest.java	(revision )
@@ -19,11 +19,15 @@
 
 package org.apache.jackrabbit.oak.plugins.index.lucene.writer;
 
+import java.io.File;
 import java.io.IOException;
 import java.util.List;
 
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Lists;
+import org.apache.jackrabbit.oak.plugins.blob.datastore.CachingFileDataStore;
+import org.apache.jackrabbit.oak.plugins.blob.datastore.DataStoreBlobStore;
+import org.apache.jackrabbit.oak.plugins.blob.datastore.DataStoreUtils;
 import org.apache.jackrabbit.oak.plugins.index.lucene.IndexDefinition;
 import org.apache.jackrabbit.oak.plugins.index.lucene.OakDirectory;
 import org.apache.jackrabbit.oak.plugins.multiplex.SimpleMountInfoProvider;
@@ -37,7 +41,9 @@
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.store.Directory;
 import org.junit.Before;
+import org.junit.Rule;
 import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
 
 import static org.apache.jackrabbit.oak.plugins.index.lucene.FieldFactory.newPathField;
 import static org.apache.jackrabbit.oak.plugins.memory.EmptyNodeState.EMPTY_NODE;
@@ -50,6 +56,9 @@
 import static org.junit.Assert.assertThat;
 
 public class MultiplexingIndexWriterTest {
+    @Rule
+    public TemporaryFolder folder = new TemporaryFolder(new File("target"));
+
     private NodeState root = INITIAL_CONTENT;
     private NodeBuilder builder = EMPTY_NODE.builder();
     private IndexDefinition defn = new IndexDefinition(root, builder.getNodeState());
@@ -66,14 +75,15 @@
 
     @Test
     public void defaultWriterWithNoMounts() throws Exception{
-        LuceneIndexWriterFactory factory = new DefaultIndexWriterFactory(Mounts.defaultMountInfoProvider(), null);
+        LuceneIndexWriterFactory factory = new DefaultIndexWriterFactory(Mounts.defaultMountInfoProvider(), null,
+            null);
         LuceneIndexWriter writer = factory.newInstance(defn, builder, true);
         assertThat(writer, instanceOf(DefaultIndexWriter.class));
     }
 
     @Test
     public void closeWithoutChange() throws Exception{
-        LuceneIndexWriterFactory factory = new DefaultIndexWriterFactory(mip, null);
+        LuceneIndexWriterFactory factory = new DefaultIndexWriterFactory(mip, null, null);
         LuceneIndexWriter writer = factory.newInstance(defn, builder, true);
         assertFalse(writer.close(0));
         assertEquals(0, Iterables.size(getIndexDirNodes()));
@@ -81,7 +91,7 @@
 
     @Test
     public void writesInDefaultMount() throws Exception{
-        LuceneIndexWriterFactory factory = new DefaultIndexWriterFactory(mip, null);
+        LuceneIndexWriterFactory factory = new DefaultIndexWriterFactory(mip, null, null);
         LuceneIndexWriter writer = factory.newInstance(defn, builder, true);
 
         //1. Add entry in foo mount
@@ -102,8 +112,34 @@
     }
 
     @Test
+    public void writesInDefaultMountBlobStore() throws Exception {
+        CachingFileDataStore ds = DataStoreUtils
+            .createCachingFDS(folder.newFolder().getAbsolutePath(),
+                folder.newFolder().getAbsolutePath());
+
+        LuceneIndexWriterFactory factory = new DefaultIndexWriterFactory(mip, null, new DataStoreBlobStore(ds));
+        LuceneIndexWriter writer = factory.newInstance(defn, builder, true);
+
+        //1. Add entry in foo mount
+        writer.updateDocument("/libs/config", newDoc("/libs/config"));
+        writer.close(0);
+        List<String> names = getIndexDirNodes();
+        //Only dirNode for mount foo should be present
+        assertThat(names, contains(indexDirName(fooMount)));
+
+        //2. Add entry in default mount
+        writer = factory.newInstance(defn, builder, true);
+        writer.updateDocument("/content", newDoc("/content"));
+        writer.close(0);
+
+        names = getIndexDirNodes();
+        //Dir names for both mounts should be present
+        assertThat(names, containsInAnyOrder(indexDirName(fooMount), indexDirName(defaultMount)));
+    }
+
+    @Test
     public void deletes() throws Exception{
-        LuceneIndexWriterFactory factory = new DefaultIndexWriterFactory(mip, null);
+        LuceneIndexWriterFactory factory = new DefaultIndexWriterFactory(mip, null, null);
         LuceneIndexWriter writer = factory.newInstance(defn, builder, true);
 
         writer.updateDocument("/libs/config", newDoc("/libs/config"));
@@ -135,7 +171,7 @@
         mip = SimpleMountInfoProvider.newBuilder()
                 .mount("foo", "/content/remote").build();
         initializeMounts();
-        LuceneIndexWriterFactory factory = new DefaultIndexWriterFactory(mip, null);
+        LuceneIndexWriterFactory factory = new DefaultIndexWriterFactory(mip, null, null);
         LuceneIndexWriter writer = factory.newInstance(defn, builder, true);
 
         writer.updateDocument("/content/remote/a", newDoc("/content/remote/a"));
@@ -186,4 +222,4 @@
         doc.add(newPathField(path));
         return doc;
     }
-}
\ No newline at end of file
+}
Index: git/jackrabbit-oak/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/reader/DefaultIndexReaderFactoryTest.java
IDEA additional info:
Subsystem: com.intellij.openapi.diff.impl.patch.CharsetEP
<+>UTF-8
===================================================================
--- git/jackrabbit-oak/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/reader/DefaultIndexReaderFactoryTest.java	(revision 994e761ce6a39a5b8083f4712f1b8d10dff08d6b)
+++ git/jackrabbit-oak/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/reader/DefaultIndexReaderFactoryTest.java	(revision )
@@ -19,9 +19,13 @@
 
 package org.apache.jackrabbit.oak.plugins.index.lucene.reader;
 
+import java.io.File;
 import java.util.List;
 import java.util.concurrent.atomic.AtomicBoolean;
 
+import org.apache.jackrabbit.oak.plugins.blob.datastore.CachingFileDataStore;
+import org.apache.jackrabbit.oak.plugins.blob.datastore.DataStoreBlobStore;
+import org.apache.jackrabbit.oak.plugins.blob.datastore.DataStoreUtils;
 import org.apache.jackrabbit.oak.plugins.index.lucene.FieldNames;
 import org.apache.jackrabbit.oak.plugins.index.lucene.IndexDefinition;
 import org.apache.jackrabbit.oak.plugins.index.lucene.LuceneIndexConstants;
@@ -35,7 +39,9 @@
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.StringField;
 import org.apache.lucene.index.IndexReader;
+import org.junit.Rule;
 import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
 
 import static org.apache.jackrabbit.oak.plugins.index.lucene.writer.MultiplexingIndexWriterTest.newDoc;
 import static org.apache.jackrabbit.oak.plugins.memory.EmptyNodeState.EMPTY_NODE;
@@ -43,6 +49,9 @@
 import static org.junit.Assert.*;
 
 public class DefaultIndexReaderFactoryTest {
+    @Rule
+    public TemporaryFolder folder = new TemporaryFolder(new File("target"));
+
     private NodeState root = INITIAL_CONTENT;
     private NodeBuilder builder = EMPTY_NODE.builder();
     private IndexDefinition defn = new IndexDefinition(root, builder.getNodeState());
@@ -58,7 +67,7 @@
 
     @Test
     public void indexDir() throws Exception{
-        LuceneIndexWriterFactory factory = new DefaultIndexWriterFactory(mip, null);
+        LuceneIndexWriterFactory factory = new DefaultIndexWriterFactory(mip, null, null);
         LuceneIndexWriter writer = factory.newInstance(defn, builder, true);
 
         writer.updateDocument("/content/en", newDoc("/content/en"));
@@ -89,8 +98,45 @@
     }
 
     @Test
+    public void indexDirWithBlobStore() throws Exception {
+        /* Register a blob store */
+        CachingFileDataStore ds = DataStoreUtils
+            .createCachingFDS(folder.newFolder().getAbsolutePath(),
+                folder.newFolder().getAbsolutePath());
+
+        LuceneIndexWriterFactory factory = new DefaultIndexWriterFactory(mip, null, new DataStoreBlobStore(ds));
+        LuceneIndexWriter writer = factory.newInstance(defn, builder, true);
+
+        writer.updateDocument("/content/en", newDoc("/content/en"));
+        writer.close(0);
+
+        LuceneIndexReaderFactory readerFactory = new DefaultIndexReaderFactory(mip, null);
+        List<LuceneIndexReader> readers = readerFactory.createReaders(defn, builder.getNodeState(),"/foo");
+        assertEquals(1, readers.size());
+
+        LuceneIndexReader reader = readers.get(0);
+        assertNotNull(reader.getReader());
+        assertNull(reader.getSuggestDirectory());
+        assertNull(reader.getLookup());
+
+        assertEquals(1, reader.getReader().numDocs());
+
+        final AtomicBoolean closed = new AtomicBoolean();
+        reader.getReader().addReaderClosedListener(new IndexReader.ReaderClosedListener() {
+            @Override
+            public void onClose(IndexReader reader) {
+                closed.set(true);
+            }
+        });
+
+        reader.close();
+
+        assertTrue(closed.get());
+    }
+
+    @Test
     public void suggesterDir() throws Exception{
-        LuceneIndexWriterFactory factory = new DefaultIndexWriterFactory(mip, null);
+        LuceneIndexWriterFactory factory = new DefaultIndexWriterFactory(mip, null, null);
         enabledSuggestorForSomeProp();
         defn = new IndexDefinition(root, builder.getNodeState());
         LuceneIndexWriter writer = factory.newInstance(defn, builder, true);
@@ -110,7 +156,7 @@
 
     @Test
     public void multipleReaders() throws Exception{
-        LuceneIndexWriterFactory factory = new DefaultIndexWriterFactory(mip, null);
+        LuceneIndexWriterFactory factory = new DefaultIndexWriterFactory(mip, null, null);
         LuceneIndexWriter writer = factory.newInstance(defn, builder, true);
 
         writer.updateDocument("/content/en", newDoc("/content/en"));
@@ -124,7 +170,7 @@
 
     @Test
     public void multipleReaders_SingleSuggester() throws Exception{
-        LuceneIndexWriterFactory factory = new DefaultIndexWriterFactory(mip, null);
+        LuceneIndexWriterFactory factory = new DefaultIndexWriterFactory(mip, null, null);
         enabledSuggestorForSomeProp();
         defn = new IndexDefinition(root, builder.getNodeState());
         LuceneIndexWriter writer = factory.newInstance(defn, builder, true);
@@ -153,4 +199,4 @@
         prop.setProperty("name", "foo");
         prop.setProperty(LuceneIndexConstants.PROP_USE_IN_SUGGEST, true);
     }
-}
\ No newline at end of file
+}
Index: git/jackrabbit-oak/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/LuceneIndexEditorTest.java
IDEA additional info:
Subsystem: com.intellij.openapi.diff.impl.patch.CharsetEP
<+>UTF-8
===================================================================
--- git/jackrabbit-oak/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/LuceneIndexEditorTest.java	(revision 994e761ce6a39a5b8083f4712f1b8d10dff08d6b)
+++ git/jackrabbit-oak/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/LuceneIndexEditorTest.java	(revision )
@@ -43,13 +43,18 @@
 import java.text.SimpleDateFormat;
 import java.util.Arrays;
 import java.util.Calendar;
+import java.util.List;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 
 import javax.annotation.Nonnull;
 
 import com.google.common.base.StandardSystemProperty;
+import com.google.common.collect.ImmutableList;
 import org.apache.jackrabbit.oak.api.CommitFailedException;
+import org.apache.jackrabbit.oak.plugins.blob.datastore.CachingFileDataStore;
+import org.apache.jackrabbit.oak.plugins.blob.datastore.DataStoreBlobStore;
+import org.apache.jackrabbit.oak.plugins.blob.datastore.DataStoreUtils;
 import org.apache.jackrabbit.oak.plugins.index.CompositeIndexEditorProvider;
 import org.apache.jackrabbit.oak.plugins.index.IndexConstants;
 import org.apache.jackrabbit.oak.plugins.index.IndexEditor;
@@ -81,15 +86,16 @@
 import org.apache.lucene.search.TopDocs;
 import org.apache.lucene.store.Directory;
 import org.junit.After;
-import org.junit.Ignore;
+import org.junit.Before;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.TemporaryFolder;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
 
+@RunWith(Parameterized.class)
 public class LuceneIndexEditorTest {
-    private static final EditorHook HOOK = new EditorHook(
-            new IndexUpdateProvider(
-                    new LuceneIndexEditorProvider()));
+    private EditorHook HOOK;
 
     private NodeState root = INITIAL_CONTENT;
 
@@ -101,6 +107,28 @@
 
     @Rule
     public final TemporaryFolder temporaryFolder = new TemporaryFolder(new File("target"));
+
+    @Parameterized.Parameter
+    public boolean useBlobStore;
+
+    @Parameterized.Parameters(name = "{index}: useBlobStore ({0})")
+    public static List<Boolean[]> fixtures() {
+        return ImmutableList.of(new Boolean[] {true}, new Boolean[] {false});
+    }
+
+    @Before
+    public void setup() throws Exception {
+        if (useBlobStore) {
+            LuceneIndexEditorProvider provider = new LuceneIndexEditorProvider();
+            CachingFileDataStore ds = DataStoreUtils
+                .createCachingFDS(temporaryFolder.newFolder().getAbsolutePath(),
+                    temporaryFolder.newFolder().getAbsolutePath());
+            provider.setBlobStore(new DataStoreBlobStore(ds));
+            HOOK = new EditorHook(new IndexUpdateProvider(provider));
+        } else {
+            HOOK = new EditorHook(new IndexUpdateProvider(new LuceneIndexEditorProvider()));
+        }
+    }
 
     @Test
     public void testLuceneWithFullText() throws Exception {
@@ -167,7 +195,7 @@
     }
 
     @Test
-    public void noOfDocsIndexedNonFullText() throws Exception{
+    public void noOfDocsIndexedNonFullText() throws Exception {
         NodeBuilder index = builder.child(INDEX_DEFINITIONS_NAME);
         NodeBuilder nb = newLuceneIndexDefinitionV2(index, "lucene",
                 of(TYPENAME_STRING));
@@ -187,10 +215,10 @@
     }
 
     @Test
-    public void saveDirectoryListing() throws Exception{
+    public void saveDirectoryListing() throws Exception {
         NodeBuilder index = builder.child(INDEX_DEFINITIONS_NAME);
         NodeBuilder nb = newLuceneIndexDefinitionV2(index, "lucene",
-                of(TYPENAME_STRING));
+            of(TYPENAME_STRING));
         nb.setProperty(LuceneIndexConstants.SAVE_DIR_LISTING, true);
         nb.setProperty(LuceneIndexConstants.FULL_TEXT_ENABLED, false);
         nb.setProperty(createProperty(INCLUDE_PROPERTY_NAMES, of("foo"), STRINGS));
Index: git/jackrabbit-oak/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/blob/BlobOptions.java
IDEA additional info:
Subsystem: com.intellij.openapi.diff.impl.patch.CharsetEP
<+>UTF-8
===================================================================
--- git/jackrabbit-oak/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/blob/BlobOptions.java	(revision )
+++ git/jackrabbit-oak/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/blob/BlobOptions.java	(revision )
@@ -0,0 +1,43 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.jackrabbit.oak.plugins.blob;
+
+/**
+ * Options while writing blobs to the blob store / data store.
+ */
+public class BlobOptions {
+    private UploadType uploadType = UploadType.DEFAULT;
+
+    public UploadType getUpload() {
+        return uploadType;
+    }
+
+    public BlobOptions setUpload(UploadType uploadType) {
+        this.uploadType = uploadType;
+        return this;
+    }
+
+    /**
+     * Specifies the upload type for the blob.
+     */
+    public enum UploadType {
+        SYNCHRONOUS,
+        DEFAULT
+    }
+}
Index: git/jackrabbit-oak/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/LuceneIndexProviderServiceTest.java
IDEA additional info:
Subsystem: com.intellij.openapi.diff.impl.patch.CharsetEP
<+>UTF-8
===================================================================
--- git/jackrabbit-oak/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/LuceneIndexProviderServiceTest.java	(revision 994e761ce6a39a5b8083f4712f1b8d10dff08d6b)
+++ git/jackrabbit-oak/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/LuceneIndexProviderServiceTest.java	(revision )
@@ -35,6 +35,9 @@
 import org.apache.commons.lang3.reflect.FieldUtils;
 import org.apache.jackrabbit.oak.api.Blob;
 import org.apache.jackrabbit.oak.api.jmx.CacheStatsMBean;
+import org.apache.jackrabbit.oak.plugins.blob.datastore.CachingFileDataStore;
+import org.apache.jackrabbit.oak.plugins.blob.datastore.DataStoreBlobStore;
+import org.apache.jackrabbit.oak.plugins.blob.datastore.DataStoreUtils;
 import org.apache.jackrabbit.oak.plugins.index.IndexEditorProvider;
 import org.apache.jackrabbit.oak.plugins.index.fulltext.ExtractedText;
 import org.apache.jackrabbit.oak.plugins.index.fulltext.PreExtractedTextProvider;
@@ -213,6 +216,23 @@
         MockOsgi.activate(service, context.bundleContext(), config);
 
         assertEquals(4000, BooleanQuery.getMaxClauseCount());
+    }
+
+    @Test
+    public void blobStoreRegistered() throws Exception{
+        MockOsgi.activate(service, context.bundleContext(), getDefaultConfig());
+        LuceneIndexEditorProvider editorProvider =
+            (LuceneIndexEditorProvider) context.getService(IndexEditorProvider.class);
+        assertNull(editorProvider.getBlobStore());
+
+        /* Register a blob store */
+        CachingFileDataStore ds = DataStoreUtils
+            .createCachingFDS(folder.newFolder().getAbsolutePath(),
+                folder.newFolder().getAbsolutePath());
+
+        service.bindBlobStore(new DataStoreBlobStore(ds));
+
+        assertNotNull(editorProvider.getBlobStore());
     }
 
     private Map<String,Object> getDefaultConfig(){
Index: git/jackrabbit-oak/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/MultiplexingLucenePropertyIndexTest.java
IDEA additional info:
Subsystem: com.intellij.openapi.diff.impl.patch.CharsetEP
<+>UTF-8
===================================================================
--- git/jackrabbit-oak/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/MultiplexingLucenePropertyIndexTest.java	(revision 994e761ce6a39a5b8083f4712f1b8d10dff08d6b)
+++ git/jackrabbit-oak/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/MultiplexingLucenePropertyIndexTest.java	(revision )
@@ -129,7 +129,7 @@
         IndexDefinition defn = new IndexDefinition(initialContent, defnBuilder.getNodeState());
 
         //1. Have 2 reader created by writes in 2 diff mounts
-        LuceneIndexWriterFactory factory = new DefaultIndexWriterFactory(mip, null);
+        LuceneIndexWriterFactory factory = new DefaultIndexWriterFactory(mip, null, null);
         LuceneIndexWriter writer = factory.newInstance(defn, builder, true);
 
         writer.updateDocument("/content/en", newDoc("/content/en"));
Index: git/jackrabbit-oak/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/TypedDataStore.java
IDEA additional info:
Subsystem: com.intellij.openapi.diff.impl.patch.CharsetEP
<+>UTF-8
===================================================================
--- git/jackrabbit-oak/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/TypedDataStore.java	(revision )
+++ git/jackrabbit-oak/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/TypedDataStore.java	(revision )
@@ -0,0 +1,42 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.jackrabbit.oak.plugins.blob.datastore;
+
+import java.io.InputStream;
+
+import org.apache.jackrabbit.core.data.DataRecord;
+import org.apache.jackrabbit.core.data.DataStoreException;
+import org.apache.jackrabbit.oak.plugins.blob.BlobOptions;
+
+/**
+ * Extension interface for a {@link org.apache.jackrabbit.core.data.DataStore}
+ * implementation that supports adding records with {@link BlobOptions}.
+ */
+public interface TypedDataStore {
+    /**
+     * Add a record with specified options.
+     *
+     * @param input
+     * @param options
+     * @return
+     * @throws DataStoreException
+     */
+    DataRecord addRecord(InputStream input, BlobOptions options) throws DataStoreException;
+}
+
Index: git/jackrabbit-oak/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/writer/IndexWriterUtils.java
IDEA additional info:
Subsystem: com.intellij.openapi.diff.impl.patch.CharsetEP
<+>UTF-8
===================================================================
--- git/jackrabbit-oak/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/writer/IndexWriterUtils.java	(revision 994e761ce6a39a5b8083f4712f1b8d10dff08d6b)
+++ git/jackrabbit-oak/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/writer/IndexWriterUtils.java	(revision )
@@ -24,11 +24,14 @@
 import java.util.HashMap;
 import java.util.Map;
 
+import javax.annotation.Nullable;
+
 import org.apache.jackrabbit.oak.plugins.index.lucene.FieldNames;
 import org.apache.jackrabbit.oak.plugins.index.lucene.IndexDefinition;
 import org.apache.jackrabbit.oak.plugins.index.lucene.LuceneIndexConstants;
 import org.apache.jackrabbit.oak.plugins.index.lucene.OakDirectory;
 import org.apache.jackrabbit.oak.plugins.index.lucene.util.SuggestHelper;
+import org.apache.jackrabbit.oak.spi.blob.GarbageCollectableBlobStore;
 import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.miscellaneous.PerFieldAnalyzerWrapper;
@@ -38,8 +41,6 @@
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.FSDirectory;
 
-import static org.apache.jackrabbit.oak.plugins.index.lucene.LuceneIndexConstants.PERSISTENCE_FILE;
-import static org.apache.jackrabbit.oak.plugins.index.lucene.LuceneIndexConstants.PERSISTENCE_NAME;
 import static org.apache.jackrabbit.oak.plugins.index.lucene.LuceneIndexConstants.PERSISTENCE_PATH;
 import static org.apache.jackrabbit.oak.plugins.index.lucene.LuceneIndexConstants.VERSION;
 import static org.apache.lucene.store.NoLockFactory.getNoLockFactory;
@@ -72,7 +73,8 @@
         }
     }
 
-    public static Directory newIndexDirectory(IndexDefinition indexDefinition, NodeBuilder definition, String dirName)
+    public static Directory newIndexDirectory(IndexDefinition indexDefinition,
+            NodeBuilder definition, String dirName, @Nullable GarbageCollectableBlobStore blobStore)
             throws IOException {
         String path = null;
         if (LuceneIndexConstants.PERSISTENCE_FILE.equalsIgnoreCase(
@@ -80,7 +82,7 @@
             path = definition.getString(PERSISTENCE_PATH);
         }
         if (path == null) {
-            return new OakDirectory(definition, dirName, indexDefinition, false);
+            return new OakDirectory(definition, dirName, indexDefinition, false, blobStore);
         } else {
             // try {
             File file = new File(path);
Index: git/jackrabbit-oak/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/blob/AbstractSharedCachingDataStore.java
IDEA additional info:
Subsystem: com.intellij.openapi.diff.impl.patch.CharsetEP
<+>UTF-8
===================================================================
--- git/jackrabbit-oak/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/blob/AbstractSharedCachingDataStore.java	(revision 994e761ce6a39a5b8083f4712f1b8d10dff08d6b)
+++ git/jackrabbit-oak/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/blob/AbstractSharedCachingDataStore.java	(revision )
@@ -46,6 +46,7 @@
 import org.apache.jackrabbit.core.data.DataRecord;
 import org.apache.jackrabbit.core.data.DataStoreException;
 import org.apache.jackrabbit.core.data.MultiDataStoreAware;
+import org.apache.jackrabbit.oak.plugins.blob.datastore.TypedDataStore;
 import org.apache.jackrabbit.oak.spi.blob.AbstractDataRecord;
 import org.apache.jackrabbit.oak.spi.blob.AbstractSharedBackend;
 import org.apache.jackrabbit.oak.stats.StatisticsProvider;
@@ -54,6 +55,7 @@
 import org.slf4j.LoggerFactory;
 
 import static com.google.common.base.Preconditions.checkArgument;
+import static org.apache.jackrabbit.oak.plugins.blob.BlobOptions.UploadType.SYNCHRONOUS;
 
 /**
  * Caches files locally and stages files locally for async uploads.
@@ -71,7 +73,7 @@
  * &lt;/DataStore>
  */
 public abstract class AbstractSharedCachingDataStore extends AbstractDataStore
-    implements MultiDataStoreAware, SharedDataStore {
+    implements MultiDataStoreAware, SharedDataStore, TypedDataStore {
     /**
      * Logger instance.
      */
@@ -215,6 +217,12 @@
 
     @Override
     public DataRecord addRecord(InputStream inputStream) throws DataStoreException {
+        return addRecord(inputStream, new BlobOptions());
+    }
+
+    @Override
+    public DataRecord addRecord(InputStream inputStream, BlobOptions blobOptions)
+        throws DataStoreException {
         Stopwatch watch = Stopwatch.createStarted();
         try {
             TransientFileFactory fileFactory = TransientFileFactory.getInstance();
@@ -237,7 +245,8 @@
 
             // asynchronously stage for upload if the size limit of staging cache permits
             // otherwise add to backend
-            if (!cache.stage(identifier.toString(), tmpFile)) {
+            if (blobOptions.getUpload() == SYNCHRONOUS
+                || !cache.stage(identifier.toString(), tmpFile)) {
                 backend.write(identifier, tmpFile);
                 // offer to download cache
                 cache.getDownloadCache().put(identifier.toString(), tmpFile);
Index: git/jackrabbit-oak/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/OakDirectory.java
IDEA additional info:
Subsystem: com.intellij.openapi.diff.impl.patch.CharsetEP
<+>UTF-8
===================================================================
--- git/jackrabbit-oak/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/OakDirectory.java	(revision 994e761ce6a39a5b8083f4712f1b8d10dff08d6b)
+++ git/jackrabbit-oak/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/OakDirectory.java	(revision )
@@ -28,6 +28,8 @@
 import java.util.List;
 import java.util.Set;
 
+import javax.annotation.Nullable;
+
 import com.google.common.collect.ImmutableSet;
 import com.google.common.collect.Sets;
 import com.google.common.io.ByteStreams;
@@ -36,6 +38,10 @@
 import org.apache.jackrabbit.oak.api.PropertyState;
 import org.apache.jackrabbit.oak.api.Type;
 import org.apache.jackrabbit.oak.commons.StringUtils;
+import org.apache.jackrabbit.oak.plugins.blob.BlobOptions;
+import org.apache.jackrabbit.oak.plugins.blob.BlobStoreBlob;
+import org.apache.jackrabbit.oak.plugins.blob.TypedBlobStore;
+import org.apache.jackrabbit.oak.spi.blob.GarbageCollectableBlobStore;
 import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
 import org.apache.jackrabbit.oak.util.PerfLogger;
 import org.apache.lucene.store.AlreadyClosedException;
@@ -59,6 +65,7 @@
 import static org.apache.jackrabbit.JcrConstants.JCR_LASTMODIFIED;
 import static org.apache.jackrabbit.oak.api.Type.BINARIES;
 import static org.apache.jackrabbit.oak.api.Type.STRINGS;
+import static org.apache.jackrabbit.oak.plugins.blob.BlobOptions.UploadType.SYNCHRONOUS;
 import static org.apache.jackrabbit.oak.plugins.index.lucene.LuceneIndexConstants.INDEX_DATA_CHILD_NAME;
 import static org.apache.jackrabbit.oak.plugins.memory.PropertyStates.createProperty;
 
@@ -67,6 +74,8 @@
  * based on an Oak {@link NodeBuilder}.
  */
 public class OakDirectory extends Directory {
+    private static final Boolean ENABLE_AYNC_DS = Boolean.getBoolean("oak.lucene.ds.async");
+
     static final PerfLogger PERF_LOGGER = new PerfLogger(LoggerFactory.getLogger(OakDirectory.class.getName() + ".perf"));
     static final String PROP_DIR_LISTING = "dirListing";
     static final String PROP_BLOB_SIZE = "blobSize";
@@ -83,12 +92,19 @@
     private final Set<String> fileNames = Sets.newConcurrentHashSet();
     private final boolean activeDeleteEnabled;
     private final String indexName;
+    @Nullable
+    private final GarbageCollectableBlobStore blobStore;
 
     public OakDirectory(NodeBuilder builder, IndexDefinition definition, boolean readOnly) {
-        this(builder, INDEX_DATA_CHILD_NAME, definition, readOnly);
+        this(builder, INDEX_DATA_CHILD_NAME, definition, readOnly, null);
     }
 
     public OakDirectory(NodeBuilder builder, String dataNodeName, IndexDefinition definition, boolean readOnly) {
+        this(builder, dataNodeName, definition, readOnly, null);
+    }
+
+    public OakDirectory(NodeBuilder builder, String dataNodeName, IndexDefinition definition,
+        boolean readOnly, @Nullable GarbageCollectableBlobStore blobStore) {
         this.lockFactory = NoLockFactory.getNoLockFactory();
         this.builder = builder;
         this.directoryBuilder = readOnly ? builder.getChildNode(dataNodeName) : builder.child(dataNodeName);
@@ -97,6 +113,7 @@
         this.fileNames.addAll(getListing());
         this.activeDeleteEnabled = definition.getActiveDeleteEnabled();
         this.indexName = definition.getIndexName();
+        this.blobStore =  blobStore;
     }
 
     @Override
@@ -141,7 +158,7 @@
     @Override
     public long fileLength(String name) throws IOException {
         NodeBuilder file = directoryBuilder.getChildNode(name);
-        OakIndexInput input = new OakIndexInput(name, file, indexName);
+        OakIndexInput input = new OakIndexInput(name, file, indexName, blobStore);
         try {
             return input.length();
         } finally {
@@ -165,7 +182,7 @@
             file = directoryBuilder.child(name);
         }
         fileNames.add(name);
-        return new OakIndexOutput(name, file, indexName);
+        return new OakIndexOutput(name, file, indexName, blobStore);
     }
 
 
@@ -174,7 +191,7 @@
             throws IOException {
         NodeBuilder file = directoryBuilder.getChildNode(name);
         if (file.exists()) {
-            return new OakIndexInput(name, file, indexName);
+            return new OakIndexInput(name, file, indexName, blobStore);
         } else {
             String msg = String.format("[%s] %s", indexName, name);
             throw new FileNotFoundException(msg);
@@ -309,13 +326,17 @@
 
         private final String dirDetails;
 
-        public OakIndexFile(String name, NodeBuilder file, String dirDetails) {
+        private final GarbageCollectableBlobStore blobStore;
+
+        public OakIndexFile(String name, NodeBuilder file, String dirDetails,
+            @Nullable GarbageCollectableBlobStore blobStore) {
             this.name = name;
             this.file = file;
             this.dirDetails = dirDetails;
             this.blobSize = determineBlobSize(file);
             this.uniqueKey = readUniqueKey(file);
             this.blob = new byte[blobSize];
+            this.blobStore = blobStore;
 
             PropertyState property = file.getProperty(JCR_DATA);
             if (property != null && property.getType() == BINARIES) {
@@ -346,6 +367,7 @@
             this.length = that.length;
             this.data = newArrayList(that.data);
             this.dataModified = that.dataModified;
+            this.blobStore = that.blobStore;
         }
 
         private void loadBlob(int i) throws IOException {
@@ -373,7 +395,8 @@
                     in = new SequenceInputStream(in, 
                             new ByteArrayInputStream(uniqueKey));
                 }
-                Blob b = file.createBlob(in);
+
+                Blob b = writeBlob(in);
                 if (index < data.size()) {
                     data.set(index, b);
                 } else {
@@ -385,6 +408,26 @@
             }
         }
 
+        /**
+         * Writes the blob to the blobstore directly if available.
+         *
+         * @param in input stream
+         * @return the created blob
+         * @throws IOException if writing the blob fails
+         */
+        private Blob writeBlob(InputStream in) throws IOException {
+            if (blobStore != null) {
+                if (blobStore instanceof TypedBlobStore && !ENABLE_AYNC_DS) {
+                    return new BlobStoreBlob(blobStore, ((TypedBlobStore) blobStore)
+                        .writeBlob(in, new BlobOptions().setUpload(SYNCHRONOUS)));
+                } else {
+                    return new BlobStoreBlob(blobStore, blobStore.writeBlob(in));
+                }
+            } else {
+                return file.createBlob(in);
+            }
+        }
+
         public void seek(long pos) throws IOException {
             // seek() may be called with pos == length
             // see https://issues.apache.org/jira/browse/LUCENE-1196
@@ -501,10 +544,11 @@
         private final WeakIdentityMap<OakIndexInput, Boolean> clones;
         private final String dirDetails;
 
-        public OakIndexInput(String name, NodeBuilder file, String dirDetails) {
+        public OakIndexInput(String name, NodeBuilder file, String dirDetails,
+            @Nullable GarbageCollectableBlobStore blobStore) {
             super(name);
             this.dirDetails = dirDetails;
-            this.file = new OakIndexFile(name, file, dirDetails);
+            this.file = new OakIndexFile(name, file, dirDetails, blobStore);
             clones = WeakIdentityMap.newConcurrentHashMap();
         }
 
@@ -584,9 +628,9 @@
         private final String dirDetails;
         private final OakIndexFile file;
 
-        public OakIndexOutput(String name, NodeBuilder file, String dirDetails) throws IOException {
+        public OakIndexOutput(String name, NodeBuilder file, String dirDetails, GarbageCollectableBlobStore blobStore) throws IOException {
             this.dirDetails = dirDetails;
-            this.file = new OakIndexFile(name, file, dirDetails);
+            this.file = new OakIndexFile(name, file, dirDetails, blobStore);
         }
 
         @Override
Index: git/jackrabbit-oak/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/blob/TypedBlobStore.java
IDEA additional info:
Subsystem: com.intellij.openapi.diff.impl.patch.CharsetEP
<+>UTF-8
===================================================================
--- git/jackrabbit-oak/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/blob/TypedBlobStore.java	(revision )
+++ git/jackrabbit-oak/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/blob/TypedBlobStore.java	(revision )
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.oak.plugins.blob;
+
+import java.io.IOException;
+import java.io.InputStream;
+
+import org.apache.jackrabbit.oak.spi.blob.GarbageCollectableBlobStore;
+
+/**
+ * Interface to provide ability to write blob to the {@link GarbageCollectableBlobStore} with
+ * {@link BlobOptions}.
+ */
+public interface TypedBlobStore extends GarbageCollectableBlobStore {
+    /**
+     * Write a blob with specified options.
+     *
+     * @param in the input stream to write
+     * @param options the options to use
+     * @return the blob id of the written blob
+     * @throws IOException if an error occurs while writing the blob
+     */
+    String writeBlob(InputStream in, BlobOptions options) throws IOException;
+}
Index: git/jackrabbit-oak/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/writer/DefaultIndexWriter.java
IDEA additional info:
Subsystem: com.intellij.openapi.diff.impl.patch.CharsetEP
<+>UTF-8
===================================================================
--- git/jackrabbit-oak/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/writer/DefaultIndexWriter.java	(revision 994e761ce6a39a5b8083f4712f1b8d10dff08d6b)
+++ git/jackrabbit-oak/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/writer/DefaultIndexWriter.java	(revision )
@@ -31,6 +31,7 @@
 import org.apache.jackrabbit.oak.plugins.index.lucene.IndexDefinition;
 import org.apache.jackrabbit.oak.plugins.index.lucene.OakDirectory;
 import org.apache.jackrabbit.oak.plugins.index.lucene.util.SuggestHelper;
+import org.apache.jackrabbit.oak.spi.blob.GarbageCollectableBlobStore;
 import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
 import org.apache.jackrabbit.oak.util.PerfLogger;
 import org.apache.jackrabbit.util.ISO8601;
@@ -45,7 +46,6 @@
 import org.slf4j.LoggerFactory;
 
 import static com.google.common.base.Preconditions.checkNotNull;
-import static org.apache.jackrabbit.oak.plugins.index.lucene.LuceneIndexConstants.SUGGEST_DATA_CHILD_NAME;
 import static org.apache.jackrabbit.oak.plugins.index.lucene.TermFactory.newPathTerm;
 import static org.apache.jackrabbit.oak.plugins.index.lucene.writer.IndexWriterUtils.getIndexWriterConfig;
 import static org.apache.jackrabbit.oak.plugins.index.lucene.writer.IndexWriterUtils.newIndexDirectory;
@@ -63,17 +63,26 @@
     private final boolean reindex;
     private IndexWriter writer;
     private Directory directory;
+    private GarbageCollectableBlobStore blobStore;
 
     public DefaultIndexWriter(IndexDefinition definition, NodeBuilder definitionBuilder,
-                              @Nullable IndexCopier indexCopier, String dirName, String suggestDirName, boolean reindex){
+        @Nullable IndexCopier indexCopier, String dirName, String suggestDirName,
+        boolean reindex, @Nullable GarbageCollectableBlobStore blobStore) {
         this.definition = definition;
         this.definitionBuilder = definitionBuilder;
         this.indexCopier = indexCopier;
         this.dirName = dirName;
         this.suggestDirName = suggestDirName;
         this.reindex = reindex;
+        this.blobStore = blobStore;
     }
 
+    public DefaultIndexWriter(IndexDefinition definition, NodeBuilder definitionBuilder,
+                              @Nullable IndexCopier indexCopier, String dirName, String suggestDirName,
+                              boolean reindex) {
+        this(definition, definitionBuilder, indexCopier, dirName, suggestDirName, reindex, null);
+    }
+
     @Override
     public void updateDocument(String path, Iterable<? extends IndexableField> doc) throws IOException {
         getWriter().updateDocument(newPathTerm(path), doc);
@@ -117,7 +126,7 @@
             final long start = PERF_LOGGER.start();
 
             if (updateSuggestions) {
-                updateSuggester(writer.getAnalyzer(), currentTime);
+                updateSuggester(writer.getAnalyzer(), currentTime, blobStore);
                 PERF_LOGGER.end(start, -1, "Completed suggester for directory {}", definition);
             }
 
@@ -135,7 +144,7 @@
     private IndexWriter getWriter() throws IOException {
         if (writer == null) {
             final long start = PERF_LOGGER.start();
-            directory = newIndexDirectory(definition, definitionBuilder, dirName);
+            directory = newIndexDirectory(definition, definitionBuilder, dirName, blobStore);
             IndexWriterConfig config;
             if (indexCopier != null){
                 directory = indexCopier.wrapForWrite(definition, directory, reindex, dirName);
@@ -153,11 +162,14 @@
      * eventually update suggest dictionary
      * @throws IOException if suggest dictionary update fails
      * @param analyzer the analyzer used to update the suggester
+     * @param blobStore the blob store to write index files to directly; may be null
      */
-    private void updateSuggester(Analyzer analyzer, Calendar currentTime) throws IOException {
+    private void updateSuggester(Analyzer analyzer, Calendar currentTime,
+        @Nullable GarbageCollectableBlobStore blobStore) throws IOException {
         NodeBuilder suggesterStatus = definitionBuilder.child(suggestDirName);
         DirectoryReader reader = DirectoryReader.open(writer, false);
-        final OakDirectory suggestDirectory = new OakDirectory(definitionBuilder, suggestDirName, definition, false);
+        final OakDirectory suggestDirectory =
+            new OakDirectory(definitionBuilder, suggestDirName, definition, false, blobStore);
         try {
             SuggestHelper.updateSuggester(suggestDirectory, analyzer, reader);
             suggesterStatus.setProperty("lastUpdated", ISO8601.format(currentTime), Type.DATE);
Index: git/jackrabbit-oak/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/DataStoreBlobStore.java
IDEA additional info:
Subsystem: com.intellij.openapi.diff.impl.patch.CharsetEP
<+>UTF-8
===================================================================
--- git/jackrabbit-oak/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/DataStoreBlobStore.java	(revision 994e761ce6a39a5b8083f4712f1b8d10dff08d6b)
+++ git/jackrabbit-oak/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/DataStoreBlobStore.java	(revision )
@@ -59,12 +59,13 @@
 import org.apache.jackrabbit.oak.cache.CacheLIRS;
 import org.apache.jackrabbit.oak.cache.CacheStats;
 import org.apache.jackrabbit.oak.commons.StringUtils;
+import org.apache.jackrabbit.oak.plugins.blob.BlobOptions;
 import org.apache.jackrabbit.oak.plugins.blob.BlobTrackingStore;
 import org.apache.jackrabbit.oak.plugins.blob.SharedDataStore;
+import org.apache.jackrabbit.oak.plugins.blob.TypedBlobStore;
 import org.apache.jackrabbit.oak.spi.blob.BlobStore;
 import org.apache.jackrabbit.oak.spi.blob.stats.StatsCollectingStreams;
 import org.apache.jackrabbit.oak.spi.blob.stats.BlobStatsCollector;
-import org.apache.jackrabbit.oak.spi.blob.GarbageCollectableBlobStore;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -73,8 +74,8 @@
  * It also handles inlining binaries if there size is smaller than
  * {@link org.apache.jackrabbit.core.data.DataStore#getMinRecordLength()}
  */
-public class DataStoreBlobStore implements DataStore, BlobStore,
-        GarbageCollectableBlobStore, BlobTrackingStore {
+public class DataStoreBlobStore
+    implements DataStore, BlobStore, TypedBlobStore, BlobTrackingStore, TypedDataStore {
     private final Logger log = LoggerFactory.getLogger(getClass());
 
     protected final DataStore delegate;
@@ -164,7 +165,7 @@
     @Override
     public DataRecord addRecord(InputStream stream) throws DataStoreException {
         try {
-            return writeStream(stream);
+            return writeStream(stream, new BlobOptions());
         } catch (IOException e) {
             throw new DataStoreException(e);
         }
@@ -206,11 +207,15 @@
 
     @Override
     public String writeBlob(InputStream stream) throws IOException {
+        return writeBlob(stream, new BlobOptions());
+    }
+
+    public String writeBlob(InputStream stream, BlobOptions options) throws IOException {
         boolean threw = true;
         try {
             long start = System.nanoTime();
             checkNotNull(stream);
-            DataRecord dr = writeStream(stream);
+            DataRecord dr = writeStream(stream, options);
             String id = getBlobId(dr);
             if (tracker != null && !InMemoryDataRecord.isInstance(id)) {
                 try {
@@ -518,6 +523,15 @@
         return Type.DEFAULT;
     }
 
+
+    @Override
+    public DataRecord addRecord(InputStream input, BlobOptions options) throws DataStoreException {
+        if (delegate instanceof TypedDataStore) {
+            return ((TypedDataStore) delegate).addRecord(input, options);
+        }
+        return delegate.addRecord(input);
+    }
+
     //~---------------------------------------------< Object >
 
     @Override
@@ -590,9 +604,10 @@
      * while large objects are stored in the data store
      *
      * @param in the input stream
+     * @param options
      * @return the value
      */
-    private DataRecord writeStream(InputStream in) throws IOException, DataStoreException {
+    private DataRecord writeStream(InputStream in, BlobOptions options) throws IOException, DataStoreException {
         int maxMemorySize = Math.max(0, delegate.getMinRecordLength() + 1);
         byte[] buffer = new byte[maxMemorySize];
         int pos = 0, len = maxMemorySize;
@@ -613,7 +628,7 @@
         } else {
             // a few bytes are already read, need to re-build the input stream
             in = new SequenceInputStream(new ByteArrayInputStream(buffer, 0, pos), in);
-            record = delegate.addRecord(in);
+            record = addRecord(in, options);
         }
         return record;
     }
Index: git/jackrabbit-oak/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/LuceneIndexProviderService.java
IDEA additional info:
Subsystem: com.intellij.openapi.diff.impl.patch.CharsetEP
<+>UTF-8
===================================================================
--- git/jackrabbit-oak/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/LuceneIndexProviderService.java	(revision 994e761ce6a39a5b8083f4712f1b8d10dff08d6b)
+++ git/jackrabbit-oak/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/LuceneIndexProviderService.java	(revision )
@@ -55,6 +55,7 @@
 import org.apache.jackrabbit.oak.plugins.index.lucene.hybrid.LocalIndexObserver;
 import org.apache.jackrabbit.oak.plugins.index.lucene.hybrid.NRTIndexFactory;
 import org.apache.jackrabbit.oak.plugins.index.lucene.reader.DefaultIndexReaderFactory;
+import org.apache.jackrabbit.oak.spi.blob.GarbageCollectableBlobStore;
 import org.apache.jackrabbit.oak.spi.commit.BackgroundObserver;
 import org.apache.jackrabbit.oak.plugins.index.lucene.score.ScorerProviderFactory;
 import org.apache.jackrabbit.oak.spi.commit.BackgroundObserverMBean;
@@ -234,6 +235,12 @@
     @Reference
     private MountInfoProvider mountInfoProvider;
 
+    @Reference(cardinality = ReferenceCardinality.OPTIONAL_UNARY,
+        policyOption = ReferencePolicyOption.GREEDY,
+        policy = ReferencePolicy.DYNAMIC
+    )
+    private GarbageCollectableBlobStore blobStore;
+
     private IndexCopier indexCopier;
 
     private File indexDir;
@@ -250,6 +257,8 @@
 
     private DocumentQueue documentQueue;
 
+    private LuceneIndexEditorProvider editorProvider;
+
     @Activate
     private void activate(BundleContext bundleContext, Map<String, ?> config)
             throws NotCompliantMBeanException, IOException {
@@ -356,7 +365,6 @@
 
     private void registerIndexEditor(BundleContext bundleContext, IndexTracker tracker, Map<String, ?> config) throws IOException {
         boolean enableCopyOnWrite = PropertiesUtil.toBoolean(config.get(PROP_COPY_ON_WRITE), PROP_COPY_ON_WRITE_DEFAULT);
-        LuceneIndexEditorProvider editorProvider;
         if (enableCopyOnWrite){
             initializeIndexCopier(bundleContext, config);
             editorProvider = new LuceneIndexEditorProvider(indexCopier, tracker, extractedTextCache,
@@ -366,6 +374,8 @@
             editorProvider = new LuceneIndexEditorProvider(null, tracker, extractedTextCache, augmentorFactory,
                     mountInfoProvider);
         }
+        editorProvider.setBlobStore(blobStore);
+
         regs.add(bundleContext.registerService(IndexEditorProvider.class.getName(), editorProvider, null));
         oakRegs.add(registerMBean(whiteboard,
                 TextExtractionStatsMBean.class,
@@ -576,6 +586,17 @@
         oakRegs.add(whiteboard.register(GCMonitor.class, gcMonitor, emptyMap()));
     }
 
+    private void registerBlobStore(GarbageCollectableBlobStore blobStore) {
+        if (editorProvider != null){
+            if (blobStore != null){
+                log.info("Registering blobStore {} with editorProvider. ", blobStore);
+            } else {
+                log.info("Unregistering blobStore");
+            }
+            editorProvider.setBlobStore(blobStore);
+        }
+    }
+
     protected void bindNodeAggregator(NodeAggregator aggregator) {
         this.nodeAggregator = aggregator;
         initialize();
@@ -594,5 +615,15 @@
     protected void unbindExtractedTextProvider(PreExtractedTextProvider preExtractedTextProvider){
         this.extractedTextProvider = null;
         registerExtractedTextProvider(null);
+    }
+
+    protected void bindBlobStore(GarbageCollectableBlobStore blobStore) {
+        this.blobStore = blobStore;
+        registerBlobStore(blobStore);
+    }
+
+    protected void unbindBlobStore(GarbageCollectableBlobStore blobStore) {
+        this.blobStore = null;
+        registerBlobStore(blobStore);
     }
 }
Index: git/jackrabbit-oak/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/blob/datastore/DataStoreUtils.java
IDEA additional info:
Subsystem: com.intellij.openapi.diff.impl.patch.CharsetEP
<+>UTF-8
===================================================================
--- git/jackrabbit-oak/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/blob/datastore/DataStoreUtils.java	(revision 994e761ce6a39a5b8083f4712f1b8d10dff08d6b)
+++ git/jackrabbit-oak/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/blob/datastore/DataStoreUtils.java	(revision )
@@ -18,9 +18,11 @@
 
 import java.io.File;
 import java.util.Map;
+import java.util.Properties;
 
 import com.google.common.collect.Maps;
 import org.apache.jackrabbit.core.data.DataStore;
+import org.apache.jackrabbit.core.data.DataStoreException;
 import org.apache.jackrabbit.core.data.FileDataStore;
 
 import org.apache.jackrabbit.oak.commons.PropertiesUtil;
@@ -92,6 +94,20 @@
         fds.setMinRecordLength(minRecordLength);
         fds.init(null);
         return fds;
+    }
+
+    public static CachingFileDataStore createCachingFDS(String path, String cachePath)
+        throws DataStoreException {
+        Properties props = new Properties();
+        props.setProperty("fsBackendPath", path);
+        CachingFileDataStore ds = new CachingFileDataStore();
+        ds.setMinRecordLength(10);
+        Map<String, ?> config = DataStoreUtils.getConfig();
+        props.putAll(config);
+        PropertiesUtil.populate(ds, Maps.fromProperties(props), false);
+        ds.setProperties(props);
+        ds.init(cachePath);
+        return ds;
     }
 
     @Test
Index: git/jackrabbit-oak/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/blob/CachingDataStoreTest.java
IDEA additional info:
Subsystem: com.intellij.openapi.diff.impl.patch.CharsetEP
<+>UTF-8
===================================================================
--- git/jackrabbit-oak/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/blob/CachingDataStoreTest.java	(revision 994e761ce6a39a5b8083f4712f1b8d10dff08d6b)
+++ git/jackrabbit-oak/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/blob/CachingDataStoreTest.java	(revision )
@@ -178,6 +178,32 @@
     }
 
     /**
+     * Add, get, delete with synchronous option.
+     * @throws Exception
+     */
+    @Test
+    public void syncAddGetDelete() throws Exception {
+        File f = copyToFile(randomStream(0, 4 * 1024), folder.newFile());
+        String id = getIdForInputStream(f);
+        FileInputStream fin = new FileInputStream(f);
+        closer.register(fin);
+
+        DataRecord rec = dataStore.addRecord(fin);
+        assertEquals(id, rec.getIdentifier().toString());
+        assertFile(rec.getStream(), f, folder);
+
+        rec = dataStore.getRecordIfStored(new DataIdentifier(id));
+        assertEquals(id, rec.getIdentifier().toString());
+        assertFile(rec.getStream(), f, folder);
+
+        assertEquals(1, Iterators.size(dataStore.getAllIdentifiers()));
+
+        dataStore.deleteRecord(new DataIdentifier(id));
+        rec = dataStore.getRecordIfStored(new DataIdentifier(id));
+        assertNull(rec);
+    }
+
+    /**
      * {@link CompositeDataStoreCache#getIfPresent(String)} when no cache.
      */
     @Test
