diff --git a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/IndexCopier.java b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/IndexCopier.java
index 830fdf8..10944f9 100644
--- a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/IndexCopier.java
+++ b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/IndexCopier.java
@@ -1208,10 +1208,15 @@ static File getFSDir(Directory dir) {
      */
     private static long getFileLength(Directory dir, String fileName){
         try{
-            return dir.fileLength(fileName);
-        } catch (Exception e){
-            return -1;
+            //Check for file presence first: asking the Directory for the length
+            //of a missing file internally creates an exception, which is costly
+            if (dir.fileExists(fileName)) {
+                return dir.fileLength(fileName);
+            }
+        } catch (Exception ignore){
+
         }
+        return -1;
     }
 
     //~------------------------------------------< CopyOnReadStatsMBean >
diff --git a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/IndexDefinition.java b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/IndexDefinition.java
index 2592585..5c3191d 100644
--- a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/IndexDefinition.java
+++ b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/IndexDefinition.java
@@ -75,11 +75,12 @@
 import static com.google.common.collect.Sets.newHashSet;
 import static org.apache.jackrabbit.JcrConstants.JCR_SCORE;
 import static org.apache.jackrabbit.JcrConstants.NT_BASE;
-import static org.apache.jackrabbit.oak.api.Type.BOOLEAN;
 import static org.apache.jackrabbit.oak.api.Type.NAMES;
 import static org.apache.jackrabbit.oak.commons.PathUtils.getParentPath;
 import static org.apache.jackrabbit.oak.plugins.index.IndexConstants.DECLARING_NODE_TYPES;
 import static org.apache.jackrabbit.oak.plugins.index.IndexConstants.ENTRY_COUNT_PROPERTY_NAME;
+import static org.apache.jackrabbit.oak.plugins.index.IndexConstants.INDEXING_MODE_NRT;
+import static org.apache.jackrabbit.oak.plugins.index.IndexConstants.INDEXING_MODE_SYNC;
 import static org.apache.jackrabbit.oak.plugins.index.IndexConstants.INDEX_PATH;
 import static org.apache.jackrabbit.oak.plugins.index.IndexConstants.REINDEX_COUNT;
 import static org.apache.jackrabbit.oak.plugins.index.lucene.LuceneIndexConstants.*;
@@ -231,6 +232,9 @@
 
     private final String indexPath;
 
+    private final boolean nrtIndexMode;
+    private final boolean syncIndexMode;
+
     @Nullable
     private final String uid;
 
@@ -303,6 +307,8 @@ public IndexDefinition(NodeState root, NodeState defn, @Nullable NodeBuilder def
         this.secureFacets = defn.hasChildNode(FACETS) && getOptionalValue(defn.getChildNode(FACETS), PROP_SECURE_FACETS, true);
         this.suggestEnabled = evaluateSuggestionEnabled();
         this.spellcheckEnabled = evaluateSpellcheckEnabled();
+        this.nrtIndexMode = supportsNRTIndexing(defn);
+        this.syncIndexMode = supportsSyncIndexing(defn);
     }
 
     public NodeState getDefinitionNodeState() {
@@ -329,6 +335,7 @@ public int getBlobSize() {
         return blobSize;
     }
 
+    @CheckForNull
     public Codec getCodec() {
         return codec;
     }
@@ -433,6 +440,15 @@ public String getUniqueId() {
         return uid;
     }
 
+    public boolean isNRTIndexingEnabled() {
+        return nrtIndexMode;
+    }
+
+    public boolean isSyncIndexingEnabled() {
+        return syncIndexMode;
+    }
+
+
     @Override
     public String toString() {
         return "Lucene Index : " + indexName;
@@ -1565,4 +1581,24 @@ private static double getDefaultCostPerEntry(IndexFormatVersion version) {
         return version == IndexFormatVersion.V1 ?  1.5 : 1.0;
     }
 
+    private static boolean supportsNRTIndexing(NodeState defn) {
+        return supportsIndexingMode(new ReadOnlyBuilder(defn), INDEXING_MODE_NRT);
+    }
+
+    private static boolean supportsSyncIndexing(NodeState defn) {
+        return supportsIndexingMode(new ReadOnlyBuilder(defn), INDEXING_MODE_SYNC);
+    }
+
+    public static boolean supportsSyncOrNRTIndexing(NodeBuilder defn) {
+       return supportsIndexingMode(defn, INDEXING_MODE_NRT) || supportsIndexingMode(defn, INDEXING_MODE_SYNC);
+    }
+
+    private static boolean supportsIndexingMode(NodeBuilder defn, String mode) {
+        PropertyState async = defn.getProperty(IndexConstants.ASYNC_PROPERTY_NAME);
+        if (async == null){
+            return false;
+        }
+        return Iterables.contains(async.getValue(Type.STRINGS), mode);
+    }
+
 }
diff --git a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/IndexNode.java b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/IndexNode.java
index 9c21566..c51fad0 100644
--- a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/IndexNode.java
+++ b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/IndexNode.java
@@ -20,34 +20,48 @@
 import static com.google.common.base.Preconditions.checkState;
 
 import java.io.IOException;
+import java.util.Collections;
 import java.util.List;
 import java.util.concurrent.locks.ReadWriteLock;
 import java.util.concurrent.locks.ReentrantReadWriteLock;
 
 
-import com.google.common.base.Preconditions;
+import javax.annotation.CheckForNull;
+import javax.annotation.Nullable;
+
+import com.google.common.collect.Iterables;
 import org.apache.jackrabbit.oak.commons.PathUtils;
+import org.apache.jackrabbit.oak.plugins.index.lucene.hybrid.NRTIndex;
+import org.apache.jackrabbit.oak.plugins.index.lucene.hybrid.NRTIndexFactory;
+import org.apache.jackrabbit.oak.plugins.index.lucene.hybrid.ReaderRefreshPolicy;
 import org.apache.jackrabbit.oak.plugins.index.lucene.reader.LuceneIndexReader;
 import org.apache.jackrabbit.oak.plugins.index.lucene.reader.LuceneIndexReaderFactory;
+import org.apache.jackrabbit.oak.plugins.index.lucene.writer.LuceneIndexWriter;
 import org.apache.jackrabbit.oak.spi.state.NodeState;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.MultiReader;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.suggest.analyzing.AnalyzingInfixSuggester;
 import org.apache.lucene.store.Directory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
-class IndexNode {
+public class IndexNode {
 
-    static IndexNode open(String indexPath, NodeState root, NodeState defnNodeState, LuceneIndexReaderFactory readerFactory)
+    static IndexNode open(String indexPath, NodeState root, NodeState defnNodeState,
+                          LuceneIndexReaderFactory readerFactory, @Nullable NRTIndexFactory nrtFactory)
             throws IOException {
         IndexDefinition definition = new IndexDefinition(root, defnNodeState);
         List<LuceneIndexReader> readers = readerFactory.createReaders(definition, defnNodeState, indexPath);
+        NRTIndex nrtIndex = nrtFactory != null ? nrtFactory.createIndex(definition) : null;
         if (!readers.isEmpty()){
-            return new IndexNode(PathUtils.getName(indexPath), definition, readers);
+            return new IndexNode(PathUtils.getName(indexPath), definition, readers, nrtIndex);
         }
         return null;
     }
 
+    private static final Logger log = LoggerFactory.getLogger(IndexNode.class);
+
     private final List<LuceneIndexReader> readers;
 
     private final String name;
@@ -56,17 +70,33 @@ static IndexNode open(String indexPath, NodeState root, NodeState defnNodeState,
 
     private final ReadWriteLock lock = new ReentrantReadWriteLock();
 
-    private final IndexSearcher indexSearcher;
+    private volatile IndexSearcher indexSearcher;
+
+    private final NRTIndex nrtIndex;
+
+    private final ReaderRefreshPolicy refreshPolicy;
+
+    private final Runnable refreshCallback = new Runnable() {
+        @Override
+        public void run() {
+            refreshReaders();
+        }
+    };
 
     private boolean closed = false;
 
-    IndexNode(String name, IndexDefinition definition, List<LuceneIndexReader> readers)
+    private List<LuceneIndexReader> nrtReaders;
+
+    IndexNode(String name, IndexDefinition definition, List<LuceneIndexReader> readers, @Nullable NRTIndex nrtIndex)
             throws IOException {
         checkArgument(!readers.isEmpty());
         this.name = name;
         this.definition = definition;
         this.readers = readers;
-        this.indexSearcher = new IndexSearcher(createReader(readers));
+        this.nrtIndex = nrtIndex;
+        this.nrtReaders = getNRTReaders();
+        this.indexSearcher = new IndexSearcher(createReader(nrtReaders));
+        this.refreshPolicy = nrtIndex != null ? nrtIndex.getRefreshPolicy() : ReaderRefreshPolicy.NEVER;
     }
 
     String getName() {
@@ -77,7 +107,7 @@ IndexDefinition getDefinition() {
         return definition;
     }
 
-    IndexSearcher getSearcher() {
+    public IndexSearcher getSearcher() {
         return indexSearcher;
     }
 
@@ -95,11 +125,12 @@ boolean acquire() {
             lock.readLock().unlock();
             return false;
         } else {
+            refreshPolicy.refreshOnReadIfRequired(refreshCallback);
             return true;
         }
     }
 
-    void release() {
+    public void release() {
         lock.readLock().unlock();
     }
 
@@ -112,9 +143,32 @@ void close() throws IOException {
             lock.writeLock().unlock();
         }
 
-       for (LuceneIndexReader reader : readers){
+        //Do not close the NRTIndex here as it might be in use
+        //by newer IndexNode. Just close the readers obtained from
+        //them
+        for (LuceneIndexReader reader : Iterables.concat(readers, getNRTReaders())){
            reader.close();
-       }
+        }
+    }
+
+    @CheckForNull
+    public LuceneIndexWriter getLocalWriter() throws IOException{
+        return nrtIndex != null ? nrtIndex.getWriter() : null;
+    }
+
+    public void refreshReadersOnWriteIfRequired() {
+        refreshPolicy.refreshOnWriteIfRequired(refreshCallback);
+    }
+
+    private void refreshReaders(){
+        List<LuceneIndexReader> newNRTReaders = getNRTReaders();
+        //The list reference would differ if index got updated
+        //so if they are same no need to reinitialize the searcher
+        if (newNRTReaders != nrtReaders) {
+            nrtReaders = newNRTReaders;
+            indexSearcher = new IndexSearcher(createReader(nrtReaders));
+            log.debug("Refreshed reader for index [{}]", definition);
+        }
     }
 
     private LuceneIndexReader getDefaultReader(){
@@ -122,14 +176,21 @@ private LuceneIndexReader getDefaultReader(){
         return readers.get(0);
     }
 
-    private IndexReader createReader(List<LuceneIndexReader> readers) {
-        if (readers.size() == 1){
+    private IndexReader createReader(List<LuceneIndexReader> nrtReaders) {
+        if (readers.size() == 1 && nrtReaders.isEmpty()){
             return readers.get(0).getReader();
         }
-        IndexReader[] readerArr = new IndexReader[readers.size()];
-        for (int i = 0; i < readerArr.length; i++) {
-            readerArr[i] = readers.get(i).getReader();
+        IndexReader[] readerArr = new IndexReader[readers.size() + nrtReaders.size()];
+        int i = 0;
+        for (LuceneIndexReader r : Iterables.concat(readers, nrtReaders)){
+            readerArr[i++] = r.getReader();
         }
         return new MultiReader(readerArr, true);
     }
+
+    private List<LuceneIndexReader> getNRTReaders() {
+        return nrtIndex != null ? nrtIndex.getReaders() : Collections.<LuceneIndexReader>emptyList();
+    }
+
+
 }
diff --git a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/IndexTracker.java b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/IndexTracker.java
index cc4595f..17e386c 100644
--- a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/IndexTracker.java
+++ b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/IndexTracker.java
@@ -34,7 +34,11 @@
 import java.util.Map;
 import java.util.Set;
 
+import javax.annotation.CheckForNull;
+import javax.annotation.Nullable;
+
 import org.apache.jackrabbit.oak.commons.PathUtils;
+import org.apache.jackrabbit.oak.plugins.index.lucene.hybrid.NRTIndexFactory;
 import org.apache.jackrabbit.oak.plugins.index.lucene.reader.DefaultIndexReaderFactory;
 import org.apache.jackrabbit.oak.plugins.index.lucene.reader.LuceneIndexReaderFactory;
 import org.apache.jackrabbit.oak.spi.commit.CompositeEditor;
@@ -51,7 +55,7 @@
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Iterables;
 
-class IndexTracker {
+public class IndexTracker {
 
     /** Logger instance. */
     private static final Logger log = LoggerFactory.getLogger(IndexTracker.class);
@@ -59,6 +63,7 @@
             new PerfLogger(LoggerFactory.getLogger(IndexTracker.class.getName() + ".perf"));
 
     private final LuceneIndexReaderFactory readerFactory;
+    private final NRTIndexFactory nrtFactory;
 
     private NodeState root = EMPTY_NODE;
 
@@ -66,7 +71,7 @@
 
     private volatile boolean refresh;
 
-    IndexTracker() {
+    public IndexTracker() {
         this((IndexCopier)null);
     }
 
@@ -74,8 +79,13 @@
         this(new DefaultIndexReaderFactory(Mounts.defaultMountInfoProvider(), cloner));
     }
 
-    IndexTracker(LuceneIndexReaderFactory readerFactory){
+    IndexTracker(LuceneIndexReaderFactory readerFactory) {
+        this(readerFactory, null);
+    }
+
+    public IndexTracker(LuceneIndexReaderFactory readerFactory, @Nullable NRTIndexFactory nrtFactory){
         this.readerFactory = readerFactory;
+        this.nrtFactory = nrtFactory;
     }
 
     synchronized void close() {
@@ -91,7 +101,7 @@ synchronized void close() {
         }
     }
 
-    synchronized void update(final NodeState root) {
+    public synchronized void update(final NodeState root) {
         if (refresh) {
             this.root = root;
             close();
@@ -115,7 +125,7 @@ private synchronized void diffAndUpdate(final NodeState root) {
                 public void leave(NodeState before, NodeState after) {
                     try {
                         long start = PERF_LOGGER.start();
-                        IndexNode index = IndexNode.open(path, root, after, readerFactory);
+                        IndexNode index = IndexNode.open(path, root, after, readerFactory, nrtFactory);
                         PERF_LOGGER.end(start, -1, "[{}] Index found to be updated. Reopening the IndexNode", path);
                         updates.put(path, index); // index can be null
                     } catch (IOException e) {
@@ -153,7 +163,7 @@ void refresh() {
         refresh = true;
     }
 
-    IndexNode acquireIndexNode(String path) {
+    public IndexNode acquireIndexNode(String path) {
         IndexNode index = indices.get(path);
         if (index != null && index.acquire()) {
             return index;
@@ -162,6 +172,17 @@ IndexNode acquireIndexNode(String path) {
         }
     }
 
+    @CheckForNull
+    public IndexDefinition getIndexDefinition(String indexPath){
+        IndexNode node = indices.get(indexPath);
+        if (node != null){
+            //Accessing the definition should not require
+            //locking as it is immutable state
+            return node.getDefinition();
+        }
+        return null;
+    }
+
     Set<String> getIndexNodePaths(){
         return indices.keySet();
     }
@@ -183,7 +204,7 @@ private synchronized IndexNode findIndexNode(String path) {
 
         try {
             if (isLuceneIndexNode(node)) {
-                index = IndexNode.open(path, root, node, readerFactory);
+                index = IndexNode.open(path, root, node, readerFactory, nrtFactory);
                 if (index != null) {
                     checkState(index.acquire());
                     indices = ImmutableMap.<String, IndexNode>builder()
diff --git a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/LuceneIndexConstants.java b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/LuceneIndexConstants.java
index f4efe45..f4b8cd4 100644
--- a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/LuceneIndexConstants.java
+++ b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/LuceneIndexConstants.java
@@ -16,14 +16,26 @@
  */
 package org.apache.jackrabbit.oak.plugins.index.lucene;
 
-import org.apache.jackrabbit.oak.api.PropertyState;
 import org.apache.lucene.analysis.Analyzer;
-import org.apache.lucene.analysis.standard.StandardAnalyzer;
 import org.apache.lucene.analysis.util.AbstractAnalysisFactory;
 import org.apache.lucene.util.Version;
 
 public interface LuceneIndexConstants {
 
+    enum IndexingMode {
+        SYNC,
+        NRT,
+        ASYNC;
+
+        public String asyncValueName(){
+            return name().toLowerCase();
+        }
+
+        public static IndexingMode from(String indexingMode){
+            return valueOf(indexingMode.toUpperCase());
+        }
+    }
+
     String TYPE_LUCENE = "lucene";
 
     String INDEX_DATA_CHILD_NAME = ":data";
diff --git a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/LuceneIndexEditor.java b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/LuceneIndexEditor.java
index ac6bf25..7b44d3b 100644
--- a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/LuceneIndexEditor.java
+++ b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/LuceneIndexEditor.java
@@ -275,6 +275,10 @@ public Editor childNodeDeleted(String name, NodeState before)
         return null; // no need to recurse down the removed subtree
     }
 
+    LuceneIndexEditorContext getContext() {
+        return context;
+    }
+
     private boolean addOrUpdate(String path, NodeState state, boolean isUpdate)
             throws CommitFailedException {
         try {
@@ -924,6 +928,10 @@ private boolean isSupportedMediaType(String type) {
     }
 
     private String parseStringValue(Blob v, Metadata metadata, String path, String propertyName) {
+        if (!context.isAsyncIndexing()){
+            //Skip text extraction for sync indexing
+            return null;
+        }
         String text = context.getExtractedTextCache().get(path, propertyName, v, context.isReindex());
         if (text == null){
             text = parseStringValue0(v, metadata, path);
diff --git a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/LuceneIndexEditorContext.java b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/LuceneIndexEditorContext.java
index b1e58bd..207e2a7 100644
--- a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/LuceneIndexEditorContext.java
+++ b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/LuceneIndexEditorContext.java
@@ -23,6 +23,8 @@
 import java.util.Set;
 import java.util.concurrent.TimeUnit;
 
+import javax.annotation.Nullable;
+
 import org.apache.commons.io.IOUtils;
 import org.apache.jackrabbit.oak.api.CommitFailedException;
 import org.apache.jackrabbit.oak.api.Type;
@@ -55,7 +57,7 @@
     private static final PerfLogger PERF_LOGGER =
             new PerfLogger(LoggerFactory.getLogger(LuceneIndexEditorContext.class.getName() + ".perf"));
 
-    private final FacetsConfig facetsConfig;
+    private FacetsConfig facetsConfig;
 
     private static final Parser defaultParser = createDefaultParser();
 
@@ -82,6 +84,8 @@
     private final IndexAugmentorFactory augmentorFactory;
 
     private final NodeState root;
+
+    private final boolean asyncIndexing;
     /**
      * The media types supported by the parser used.
      */
@@ -91,23 +95,26 @@
     //Set for testing ONLY
     private static Clock clock = Clock.SIMPLE;
 
-    LuceneIndexEditorContext(NodeState root, NodeBuilder definition, IndexUpdateCallback updateCallback,
+    LuceneIndexEditorContext(NodeState root, NodeBuilder definition,
+                             @Nullable IndexDefinition indexDefinition,
+                             IndexUpdateCallback updateCallback,
                              LuceneIndexWriterFactory indexWriterFactory,
                              ExtractedTextCache extractedTextCache,
-                             IndexAugmentorFactory augmentorFactory) {
+                             IndexAugmentorFactory augmentorFactory,
+                             boolean asyncIndexing) {
         configureUniqueId(definition);
         this.root = root;
         this.definitionBuilder = definition;
         this.indexWriterFactory = indexWriterFactory;
-        this.definition = new IndexDefinition(root, definition);
+        this.definition = indexDefinition != null ? indexDefinition : new IndexDefinition(root, definition);
         this.indexedNodes = 0;
         this.updateCallback = updateCallback;
         this.extractedTextCache = extractedTextCache;
         this.augmentorFactory = augmentorFactory;
+        this.asyncIndexing = asyncIndexing;
         if (this.definition.isOfOldFormat()){
             IndexDefinition.updateDefinition(definition);
         }
-        this.facetsConfig = FacetHelper.getFacetsConfig(definition);
     }
 
     Parser getParser() {
@@ -177,6 +184,10 @@ public long incIndexedNodes() {
         return indexedNodes;
     }
 
+    public boolean isAsyncIndexing() {
+        return asyncIndexing;
+    }
+
     public long getIndexedNodes() {
         return indexedNodes;
     }
@@ -197,6 +208,9 @@ public IndexDefinition getDefinition() {
     }
 
     FacetsConfig getFacetsConfig() {
+        if (facetsConfig == null){
+            facetsConfig = FacetHelper.getFacetsConfig(definitionBuilder);
+        }
         return facetsConfig;
     }
 
diff --git a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/LuceneIndexEditorProvider.java b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/LuceneIndexEditorProvider.java
index 60fc3fb..8e5f84e 100644
--- a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/LuceneIndexEditorProvider.java
+++ b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/LuceneIndexEditorProvider.java
@@ -20,17 +20,25 @@
 import javax.annotation.Nullable;
 
 import org.apache.jackrabbit.oak.api.CommitFailedException;
+import org.apache.jackrabbit.oak.plugins.index.ContextAwareCallback;
 import org.apache.jackrabbit.oak.plugins.index.IndexEditor;
 import org.apache.jackrabbit.oak.plugins.index.IndexEditorProvider;
 import org.apache.jackrabbit.oak.plugins.index.IndexUpdateCallback;
+import org.apache.jackrabbit.oak.plugins.index.IndexingContext;
+import org.apache.jackrabbit.oak.plugins.index.lucene.hybrid.LocalIndexWriterFactory;
 import org.apache.jackrabbit.oak.plugins.index.lucene.writer.DefaultIndexWriterFactory;
 import org.apache.jackrabbit.oak.plugins.index.lucene.writer.LuceneIndexWriterFactory;
+import org.apache.jackrabbit.oak.spi.commit.CommitContext;
 import org.apache.jackrabbit.oak.spi.commit.Editor;
 import org.apache.jackrabbit.oak.spi.mount.MountInfoProvider;
 import org.apache.jackrabbit.oak.spi.mount.Mounts;
 import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
 import org.apache.jackrabbit.oak.spi.state.NodeState;
+import org.apache.jackrabbit.oak.spi.state.ReadOnlyBuilder;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
+import static com.google.common.base.Preconditions.checkArgument;
 import static com.google.common.base.Preconditions.checkNotNull;
 import static org.apache.jackrabbit.oak.plugins.index.lucene.LuceneIndexConstants.TYPE_LUCENE;
 
@@ -42,10 +50,19 @@
  * 
  */
 public class LuceneIndexEditorProvider implements IndexEditorProvider {
+    private final Logger log = LoggerFactory.getLogger(getClass());
     private final IndexCopier indexCopier;
     private final ExtractedTextCache extractedTextCache;
     private final IndexAugmentorFactory augmentorFactory;
     private final LuceneIndexWriterFactory indexWriterFactory;
+    private final IndexTracker indexTracker;
+
+    /**
+     * Number of indexed Lucene documents that can be held in memory
+     * This ensures that for very large commit memory consumption
+     * is bounded
+     */
+    private int inMemoryDocsLimit = Integer.getInteger("oak.lucene.inMemoryDocsLimit", 500);
 
     public LuceneIndexEditorProvider() {
         this(null);
@@ -65,8 +82,17 @@ public LuceneIndexEditorProvider(@Nullable IndexCopier indexCopier,
                                      ExtractedTextCache extractedTextCache,
                                      @Nullable IndexAugmentorFactory augmentorFactory,
                                      MountInfoProvider mountInfoProvider) {
+        this(indexCopier, null, extractedTextCache, augmentorFactory, mountInfoProvider);
+    }
+
+    public LuceneIndexEditorProvider(@Nullable IndexCopier indexCopier,
+                                     @Nullable IndexTracker indexTracker,
+                                     ExtractedTextCache extractedTextCache,
+                                     @Nullable IndexAugmentorFactory augmentorFactory,
+                                     MountInfoProvider mountInfoProvider) {
         this.indexCopier = indexCopier;
-        this.extractedTextCache = checkNotNull(extractedTextCache);
+        this.indexTracker = indexTracker;
+        this.extractedTextCache = extractedTextCache != null ? extractedTextCache : new ExtractedTextCache(0, 0);
         this.augmentorFactory = augmentorFactory;
         this.indexWriterFactory = new DefaultIndexWriterFactory(checkNotNull(mountInfoProvider), indexCopier);
     }
@@ -77,8 +103,52 @@ public Editor getIndexEditor(
             @Nonnull IndexUpdateCallback callback)
             throws CommitFailedException {
         if (TYPE_LUCENE.equals(type)) {
-            LuceneIndexEditorContext context = new LuceneIndexEditorContext(root, definition, callback,
-                    indexWriterFactory, extractedTextCache, augmentorFactory);
+            checkArgument(callback instanceof ContextAwareCallback, "callback instance not of type " +
+                    "ContextAwareCallback [%s]", callback);
+            IndexingContext indexingContext = ((ContextAwareCallback)callback).getIndexingContext();
+            LuceneIndexWriterFactory writerFactory = indexWriterFactory;
+            IndexDefinition indexDefinition = null;
+            boolean asyncIndexing = true;
+            if (!indexingContext.isAsync() && IndexDefinition.supportsSyncOrNRTIndexing(definition)) {
+
+                //Would not participate in reindexing. Only interested in
+                //incremental indexing
+                if (indexingContext.isReindexing()){
+                    return null;
+                }
+
+                if (!indexingContext.getCommitInfo().getInfo().containsKey(CommitContext.NAME)){
+                    //Logically there should not be any commit without commit context. But
+                    //some initializer code does the commit with out it. So ignore such calls with
+                    //warning now
+                    //TODO Revisit use of warn level once all such cases are analyzed
+                    log.warn("No CommitContext found for commit", new Exception());
+                    return null;
+                }
+
+                //TODO Also check if index has been done once
+
+                writerFactory = new LocalIndexWriterFactory(indexingContext, inMemoryDocsLimit);
+
+                //IndexDefinition from tracker might differ from one passed here for reindexing
+                //case which should be fine. However reusing existing definition would avoid
+                //creating definition instance for each commit as this gets executed for each commit
+                if (indexTracker != null){
+                    indexDefinition = indexTracker.getIndexDefinition(indexingContext.getIndexPath());
+                }
+
+                //Pass on a read only builder to ensure that nothing gets written
+                //at all to NodeStore for local indexing.
+                //TODO [hybrid] This would cause issue with Facets as for faceted fields
+                //some stuff gets written to NodeBuilder. That logic should be refactored
+                //to be moved to LuceneIndexWriter
+                definition = new ReadOnlyBuilder(definition.getNodeState());
+
+                asyncIndexing = false;
+            }
+
+            LuceneIndexEditorContext context = new LuceneIndexEditorContext(root, definition, indexDefinition, callback,
+                    writerFactory, extractedTextCache, augmentorFactory, asyncIndexing);
             return new LuceneIndexEditor(context);
         }
         return null;
@@ -91,4 +161,8 @@ IndexCopier getIndexCopier() {
     ExtractedTextCache getExtractedTextCache() {
         return extractedTextCache;
     }
+
+    public void setInMemoryDocsLimit(int inMemoryDocsLimit) {
+        this.inMemoryDocsLimit = inMemoryDocsLimit;
+    }
 }
diff --git a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/LuceneIndexProviderService.java b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/LuceneIndexProviderService.java
index 3218867..4661f32 100644
--- a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/LuceneIndexProviderService.java
+++ b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/LuceneIndexProviderService.java
@@ -51,6 +51,9 @@
 import org.apache.jackrabbit.oak.plugins.index.IndexEditorProvider;
 import org.apache.jackrabbit.oak.plugins.index.aggregate.NodeAggregator;
 import org.apache.jackrabbit.oak.plugins.index.fulltext.PreExtractedTextProvider;
+import org.apache.jackrabbit.oak.plugins.index.lucene.hybrid.DocumentQueue;
+import org.apache.jackrabbit.oak.plugins.index.lucene.hybrid.LocalIndexObserver;
+import org.apache.jackrabbit.oak.plugins.index.lucene.hybrid.NRTIndexFactory;
 import org.apache.jackrabbit.oak.plugins.index.lucene.reader.DefaultIndexReaderFactory;
 import org.apache.jackrabbit.oak.spi.commit.BackgroundObserver;
 import org.apache.jackrabbit.oak.plugins.index.lucene.score.ScorerProviderFactory;
@@ -61,6 +64,7 @@
 import org.apache.jackrabbit.oak.spi.query.QueryIndexProvider;
 import org.apache.jackrabbit.oak.spi.whiteboard.Registration;
 import org.apache.jackrabbit.oak.spi.whiteboard.Whiteboard;
+import org.apache.jackrabbit.oak.stats.StatisticsProvider;
 import org.apache.lucene.analysis.util.CharFilterFactory;
 import org.apache.lucene.analysis.util.TokenFilterFactory;
 import org.apache.lucene.analysis.util.TokenizerFactory;
@@ -191,6 +195,23 @@
     )
     private static final String PROP_BOOLEAN_CLAUSE_LIMIT = "booleanClauseLimit";
 
+    private static final boolean PROP_HYBRID_INDEXING_DEFAULT = true;
+    @Property(
+            boolValue = PROP_HYBRID_INDEXING_DEFAULT,
+            label = "Hybrid Indexing",
+            description = "When enabled Lucene NRT Indexing mode would be enabled"
+    )
+    private static final String PROP_HYBRID_INDEXING = "enableHybridIndexing";
+
+    private static final int PROP_HYBRID_QUEUE_SIZE_DEFAULT = 1000;
+    @Property(
+            intValue = PROP_HYBRID_QUEUE_SIZE_DEFAULT,
+            label = "Queue size",
+            description = "Size of in memory queue used for storing Lucene Documents which need to be " +
+                    "added to local index"
+    )
+    private static final String PROP_HYBRID_QUEUE_SIZE = "hybridQueueSize";
+
     private Whiteboard whiteboard;
 
     private BackgroundObserver backgroundObserver;
@@ -201,6 +222,9 @@
     @Reference
     private IndexAugmentorFactory augmentorFactory;
 
+    @Reference
+    private StatisticsProvider statisticsProvider;
+
     @Reference(policy = ReferencePolicy.DYNAMIC,
             cardinality = ReferenceCardinality.OPTIONAL_UNARY,
             policyOption = ReferencePolicyOption.GREEDY
@@ -220,10 +244,17 @@
 
     private ExtractedTextCache extractedTextCache;
 
+    private boolean hybridIndex;
+
+    private NRTIndexFactory nrtIndexFactory;
+
+    private DocumentQueue documentQueue;
+
     @Activate
     private void activate(BundleContext bundleContext, Map<String, ?> config)
             throws NotCompliantMBeanException, IOException {
         boolean disabled = PropertiesUtil.toBoolean(config.get(PROP_DISABLED), PROP_DISABLED_DEFAULT);
+        hybridIndex = PropertiesUtil.toBoolean(config.get(PROP_HYBRID_INDEXING), PROP_HYBRID_INDEXING_DEFAULT);
 
         if (disabled) {
             log.info("Component disabled by configuration");
@@ -236,13 +267,15 @@ private void activate(BundleContext bundleContext, Map<String, ?> config)
         whiteboard = new OsgiWhiteboard(bundleContext);
         threadPoolSize = PropertiesUtil.toInteger(config.get(PROP_THREAD_POOL_SIZE), PROP_THREAD_POOL_SIZE_DEFAULT);
         initializeExtractedTextCache(bundleContext, config);
-        indexProvider = new LuceneIndexProvider(createTracker(bundleContext, config), scorerFactory, augmentorFactory);
+        IndexTracker tracker = createTracker(bundleContext, config);
+        indexProvider = new LuceneIndexProvider(tracker, scorerFactory, augmentorFactory);
         initializeLogging(config);
         initialize();
 
         regs.add(bundleContext.registerService(QueryIndexProvider.class.getName(), indexProvider, null));
         registerObserver(bundleContext, config);
-        registerIndexEditor(bundleContext, config);
+        registerLocalIndexObserver(bundleContext, tracker, config);
+        registerIndexEditor(bundleContext, tracker, config);
 
         oakRegs.add(registerMBean(whiteboard,
                 LuceneIndexMBean.class,
@@ -271,6 +304,14 @@ private void deactivate() throws InterruptedException, IOException {
             indexProvider = null;
         }
 
+        if (documentQueue != null){
+            documentQueue.close();
+        }
+
+        if (nrtIndexFactory != null){
+            nrtIndexFactory.close();
+        }
+
         //Close the copier first i.e. before executorService
         if (indexCopier != null){
             indexCopier.close();
@@ -313,15 +354,17 @@ private void initializeLogging(Map<String, ?> config) {
         }
     }
 
-    private void registerIndexEditor(BundleContext bundleContext, Map<String, ?> config) throws IOException {
+    private void registerIndexEditor(BundleContext bundleContext, IndexTracker tracker, Map<String, ?> config) throws IOException {
         boolean enableCopyOnWrite = PropertiesUtil.toBoolean(config.get(PROP_COPY_ON_WRITE), PROP_COPY_ON_WRITE_DEFAULT);
         LuceneIndexEditorProvider editorProvider;
         if (enableCopyOnWrite){
             initializeIndexCopier(bundleContext, config);
-            editorProvider = new LuceneIndexEditorProvider(indexCopier, extractedTextCache, augmentorFactory, mountInfoProvider);
+            editorProvider = new LuceneIndexEditorProvider(indexCopier, tracker, extractedTextCache,
+                    augmentorFactory,  mountInfoProvider);
             log.info("Enabling CopyOnWrite support. Index files would be copied under {}", indexDir.getAbsolutePath());
         } else {
-            editorProvider = new LuceneIndexEditorProvider(null, extractedTextCache, augmentorFactory, mountInfoProvider);
+            editorProvider = new LuceneIndexEditorProvider(null, tracker, extractedTextCache, augmentorFactory,
+                    mountInfoProvider);
         }
         regs.add(bundleContext.registerService(IndexEditorProvider.class.getName(), editorProvider, null));
         oakRegs.add(registerMBean(whiteboard,
@@ -336,7 +379,10 @@ private IndexTracker createTracker(BundleContext bundleContext, Map<String, ?> c
         if (enableCopyOnRead){
             initializeIndexCopier(bundleContext, config);
             log.info("Enabling CopyOnRead support. Index files would be copied under {}", indexDir.getAbsolutePath());
-            return new IndexTracker(new DefaultIndexReaderFactory(mountInfoProvider, indexCopier));
+            if (hybridIndex) {
+                nrtIndexFactory = new NRTIndexFactory(indexCopier);
+            }
+            return new IndexTracker(new DefaultIndexReaderFactory(mountInfoProvider, indexCopier), nrtIndexFactory);
         }
 
         return new IndexTracker();
@@ -425,6 +471,19 @@ private void registerObserver(BundleContext bundleContext, Map<String, ?> config
         regs.add(bundleContext.registerService(Observer.class.getName(), observer, null));
     }
 
+    private void registerLocalIndexObserver(BundleContext bundleContext, IndexTracker tracker, Map<String, ?> config) {
+        if (!hybridIndex){
+            log.info("Hybrid indexing feature disabled");
+            return;
+        }
+
+        int queueSize = PropertiesUtil.toInteger(config.get(PROP_HYBRID_QUEUE_SIZE), PROP_HYBRID_QUEUE_SIZE_DEFAULT);
+        documentQueue = new DocumentQueue(queueSize, tracker, getExecutorService(), statisticsProvider);
+        LocalIndexObserver localIndexObserver = new LocalIndexObserver(documentQueue, statisticsProvider);
+        regs.add(bundleContext.registerService(Observer.class.getName(), localIndexObserver, null));
+        log.info("Hybrid indexing enabled for configured indexes with queue size of {}", queueSize);
+    }
+
     private void initializeFactoryClassLoaders(ClassLoader classLoader) {
         ClassLoader originalClassLoader = Thread.currentThread()
                 .getContextClassLoader();
diff --git a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/directory/IndexRootDirectory.java b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/directory/IndexRootDirectory.java
index 26eb3a4..3c422fc 100644
--- a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/directory/IndexRootDirectory.java
+++ b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/directory/IndexRootDirectory.java
@@ -21,6 +21,7 @@
 
 import java.io.File;
 import java.io.FileFilter;
+import java.io.FilenameFilter;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collection;
@@ -42,6 +43,7 @@
 import org.apache.jackrabbit.oak.commons.IOUtils;
 import org.apache.jackrabbit.oak.commons.PathUtils;
 import org.apache.jackrabbit.oak.plugins.index.lucene.IndexDefinition;
+import org.apache.jackrabbit.oak.plugins.index.lucene.hybrid.NRTIndex;
 import org.apache.jackrabbit.oak.stats.Clock;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -251,6 +253,10 @@ private void gcIndexDirs() throws IOException {
                             "Deletion would be retried later again.",  dir);
                 }
             }
+
+            if (!dirs.isEmpty()) {
+                totalDeletedSize += gcNRTIndexDirs(dirs.get(0));
+            }
             totalDeletedSize += deleteOldFormatDir(dirs.get(0).getJcrPath());
         }
 
@@ -260,6 +266,30 @@ private void gcIndexDirs() throws IOException {
         }
     }
 
+    /**
+     * Removes all directories created by NRTIndex which have
+     * the nrt prefix. Returns the total size of the deleted directories.
+     */
+    private long gcNRTIndexDirs(LocalIndexDir idxDir) {
+        final String prefix = getFSSafeName(NRTIndex.NRT_DIR_PREFIX);
+        File[] nrtDirs = idxDir.dir.listFiles(new FilenameFilter() {
+            @Override
+            public boolean accept(File dir, String name) {
+                return name.startsWith(prefix);
+            }
+        });
+
+        long size = 0;
+        if (nrtDirs != null) {
+            for (File f : nrtDirs){
+                size += FileUtils.sizeOf(f);
+                FileUtils.deleteQuietly(f);
+            }
+        }
+
+        return size;
+    }
+
     @CheckForNull
     private LocalIndexDir findMatchingIndexDir(File dir) throws IOException {
         //Resolve to canonical file so that equals can work reliable
@@ -294,7 +324,8 @@ private File getOldFormatDir(String indexPath) {
         return new File(indexRootDir, subDir);
     }
 
-    private static String getFSSafeName(String e) {
+    static String getFSSafeName(String e) {
+        //TODO Exclude -_ like chars via [^\W_]
         return e.replaceAll("\\W", "");
     }
 
diff --git a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/hybrid/DocumentQueue.java b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/hybrid/DocumentQueue.java
new file mode 100644
index 0000000..1a7ccf2
--- /dev/null
+++ b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/hybrid/DocumentQueue.java
@@ -0,0 +1,228 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.jackrabbit.oak.plugins.index.lucene.hybrid;
+
+import java.io.Closeable;
+import java.io.IOException;
+import java.lang.Thread.UncaughtExceptionHandler;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.BlockingQueue;
+import java.util.concurrent.Callable;
+import java.util.concurrent.Executor;
+import java.util.concurrent.LinkedBlockingDeque;
+import java.util.concurrent.TimeUnit;
+
+import com.google.common.collect.ArrayListMultimap;
+import com.google.common.collect.ListMultimap;
+import com.google.common.collect.Lists;
+import org.apache.jackrabbit.oak.commons.concurrent.NotifyingFutureTask;
+import org.apache.jackrabbit.oak.plugins.index.lucene.IndexNode;
+import org.apache.jackrabbit.oak.plugins.index.lucene.IndexTracker;
+import org.apache.jackrabbit.oak.plugins.index.lucene.writer.LuceneIndexWriter;
+import org.apache.jackrabbit.oak.stats.CounterStats;
+import org.apache.jackrabbit.oak.stats.MeterStats;
+import org.apache.jackrabbit.oak.stats.StatisticsProvider;
+import org.apache.jackrabbit.oak.stats.StatsOptions;
+import org.apache.lucene.index.IndexableField;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import static com.google.common.base.Preconditions.checkState;
+
+public class DocumentQueue implements Closeable{
+    private static final LuceneDoc STOP = LuceneDoc.forUpdate("", "", Collections.<IndexableField>emptyList());
+    private final Logger log = LoggerFactory.getLogger(getClass());
+    private final IndexTracker tracker;
+    private final BlockingQueue<LuceneDoc> docsQueue;
+    private final Executor executor;
+    private final CounterStats queueSizeStats;
+    private final MeterStats added;
+    private final MeterStats dropped;
+
+    /**
+     * Time in millis for which add call to queue
+     * would wait before dropping off
+     */
+    private final int offerTimeMillis;
+
+    private volatile boolean stopped;
+
+    /**
+     * Handler for uncaught exception on the background thread
+     */
+    private final UncaughtExceptionHandler exceptionHandler = new UncaughtExceptionHandler() {
+        @Override
+        public void uncaughtException(Thread t, Throwable e) {
+            log.error("Uncaught exception", e);
+        }
+    };
+
+    /**
+     * Current background task
+     */
+    private volatile NotifyingFutureTask currentTask = NotifyingFutureTask.completed();
+
+    /**
+     * Completion handler: set the current task to the next task and schedules that one
+     * on the background thread.
+     */
+    private final Runnable completionHandler = new Runnable() {
+        private final Callable<Void> task = new Callable<Void>() {
+            @Override
+            public Void call() throws Exception {
+                try {
+                    int maxSize = docsQueue.size();
+                    List<LuceneDoc> docs = Lists.newArrayListWithCapacity(maxSize);
+                    ListMultimap<String, LuceneDoc> docsPerIndex = ArrayListMultimap.create();
+
+                    //Do the processing in batches
+                    int count = docsQueue.drainTo(docs, maxSize);
+                    if (count == 0) {
+                        return null;
+                    }
+
+                    queueSizeStats.dec(count);
+
+                    for (int i = 0; i < count; i++) {
+                        LuceneDoc doc = docs.get(i);
+                        if (doc == STOP) {
+                            return null;
+                        }
+                        docsPerIndex.get(doc.indexPath).add(doc);
+                    }
+
+                    addAllSynchronously(docsPerIndex.asMap());
+
+                    currentTask.onComplete(completionHandler);
+                } catch (Throwable t) {
+                    exceptionHandler.uncaughtException(Thread.currentThread(), t);
+                }
+                return null;
+            }
+        };
+
+        @Override
+        public void run() {
+            currentTask = new NotifyingFutureTask(task);
+            executor.execute(currentTask);
+        }
+    };
+
+    public DocumentQueue(int maxQueueSize, IndexTracker tracker, Executor executor) {
+        this(maxQueueSize, tracker, executor, StatisticsProvider.NOOP);
+    }
+
+    public DocumentQueue(int maxQueueSize, IndexTracker tracker, Executor executor, StatisticsProvider sp) {
+        this.docsQueue = new LinkedBlockingDeque<>(maxQueueSize);
+        this.tracker = tracker;
+        this.executor = executor;
+        this.offerTimeMillis = 100; //Wait for at most 100 millis while adding stuff to queue
+        this.queueSizeStats = sp.getCounterStats("HYBRID_QUEUE_SIZE", StatsOptions.DEFAULT);
+        this.added = sp.getMeter("HYBRID_ADDED", StatsOptions.DEFAULT);
+        this.dropped = sp.getMeter("HYBRID_DROPPED", StatsOptions.DEFAULT);
+    }
+
+    public boolean add(LuceneDoc doc){
+        checkState(!stopped);
+        boolean added = false;
+        try {
+            added = docsQueue.offer(doc, offerTimeMillis, TimeUnit.MILLISECONDS);
+        } catch (InterruptedException e) {
+            Thread.currentThread().interrupt();
+        }
+        // Set the completion handler on the currently running task. Multiple calls
+        // to onComplete are not a problem here since we always pass the same value.
+        // Thus there is no question as to which of the handlers will effectively run.
+        currentTask.onComplete(completionHandler);
+        if (added) {
+            queueSizeStats.inc();
+        } else {
+            dropped.mark();
+        }
+        return added;
+    }
+
+    public void addAllSynchronously(Map<String, Collection<LuceneDoc>> docsPerIndex) {
+        //If required this can be optimized by indexing different indexes in parallel
+        //Something to consider if it becomes a bottleneck
+        for (Map.Entry<String, Collection<LuceneDoc>> e : docsPerIndex.entrySet()) {
+            processDocs(e.getKey(), e.getValue());
+            added.mark(e.getValue().size());
+        }
+    }
+
+    List<LuceneDoc> getQueuedDocs(){
+        List<LuceneDoc> docs = Lists.newArrayList();
+        docs.addAll(docsQueue);
+        return docs;
+    }
+
+    private void processDocs(String indexPath, Iterable<LuceneDoc> docs){
+
+        //Drop the write call if stopped
+        if (stopped) {
+            return;
+        }
+
+        IndexNode indexNode = tracker.acquireIndexNode(indexPath);
+        if (indexNode == null) {
+            log.debug("No IndexNode found for index [{}].", indexPath);
+            return;
+        }
+
+        try{
+            LuceneIndexWriter writer = indexNode.getLocalWriter();
+            for (LuceneDoc doc : docs) {
+                if (writer == null) {
+                    //IndexDefinition per IndexNode might have changed and local
+                    //indexing is disabled. Ignore
+                    log.debug("No local IndexWriter found for index [{}]. Skipping index " +
+                            "entry for [{}]", indexPath, doc.docPath);
+                    return;
+                }
+                if (doc.delete) {
+                    writer.deleteDocuments(doc.docPath);
+                } else {
+                    writer.updateDocument(doc.docPath, doc.doc);
+                }
+                log.trace("Updated index with doc {}", doc);
+            }
+            indexNode.refreshReadersOnWriteIfRequired();
+        } catch (Exception e) {
+            //For now we just log it. Later, if errors turn out to be frequent, we may
+            //need to temporarily disable indexing for this index
+            log.warn("Error occurred while indexing index [{}]",indexPath, e);
+        } finally {
+            indexNode.release();
+        }
+    }
+
+    @Override
+    public void close() throws IOException {
+        //It's fine to "drop" any entry in the queue as the
+        //local index is only meant for the current running state
+        stopped = true;
+        docsQueue.clear();
+        docsQueue.add(STOP);
+    }
+}
diff --git a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/hybrid/IndexUpdateListener.java b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/hybrid/IndexUpdateListener.java
new file mode 100644
index 0000000..1028be6
--- /dev/null
+++ b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/hybrid/IndexUpdateListener.java
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.jackrabbit.oak.plugins.index.lucene.hybrid;
+
+public interface IndexUpdateListener extends ReaderRefreshPolicy {
+
+    void updated();
+}
diff --git a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/hybrid/LocalIndexObserver.java b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/hybrid/LocalIndexObserver.java
new file mode 100644
index 0000000..e5ccf0f
--- /dev/null
+++ b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/hybrid/LocalIndexObserver.java
@@ -0,0 +1,79 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.jackrabbit.oak.plugins.index.lucene.hybrid;
+
+import javax.annotation.Nonnull;
+import javax.annotation.Nullable;
+
+import org.apache.jackrabbit.oak.spi.commit.CommitContext;
+import org.apache.jackrabbit.oak.spi.commit.CommitInfo;
+import org.apache.jackrabbit.oak.spi.commit.Observer;
+import org.apache.jackrabbit.oak.spi.state.NodeState;
+import org.apache.jackrabbit.oak.stats.StatisticsProvider;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class LocalIndexObserver implements Observer{
+    private final Logger log = LoggerFactory.getLogger(getClass());
+    private final DocumentQueue docQueue;
+
+    public LocalIndexObserver(DocumentQueue docQueue, StatisticsProvider sp) {
+        this.docQueue = docQueue;
+    }
+
+    @Override
+    public void contentChanged(@Nonnull NodeState root, @Nullable CommitInfo info) {
+        //TODO [hybrid] Do external diff?
+        if (info == null){
+           return;
+        }
+
+        CommitContext commitContext = (CommitContext) info.getInfo().get(CommitContext.NAME);
+        //Commit done internally i.e. one not using Root/Tree API
+        if (commitContext == null){
+            return;
+        }
+
+        LuceneDocumentHolder holder = (LuceneDocumentHolder) commitContext.get(LuceneDocumentHolder.NAME);
+        //Nothing to be indexed
+        if (holder == null){
+            return;
+        }
+
+        commitContext.remove(LuceneDocumentHolder.NAME);
+
+        int droppedCount = 0;
+        for (LuceneDoc doc : holder.getNRTIndexedDocs()){
+            if (!docQueue.add(doc)) {
+                droppedCount++;
+            }
+        }
+
+        //After nrt docs add all sync indexed docs
+        //Doing it *after* ensures that the nrt index might catch
+        //up by the time the sync ones are finished
+        docQueue.addAllSynchronously(holder.getSyncIndexedDocs());
+
+        if (droppedCount > 0){
+            //TODO Ensure that the log does not get flooded
+            log.warn("Dropped [{}] docs from indexing as queue is full", droppedCount);
+        }
+    }
+}
diff --git a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/hybrid/LocalIndexWriterFactory.java b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/hybrid/LocalIndexWriterFactory.java
new file mode 100644
index 0000000..66f8786
--- /dev/null
+++ b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/hybrid/LocalIndexWriterFactory.java
@@ -0,0 +1,114 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.jackrabbit.oak.plugins.index.lucene.hybrid;
+
+import java.io.IOException;
+import java.util.List;
+
+import com.google.common.base.Preconditions;
+import org.apache.jackrabbit.oak.plugins.index.IndexingContext;
+import org.apache.jackrabbit.oak.plugins.index.lucene.IndexDefinition;
+import org.apache.jackrabbit.oak.plugins.index.lucene.writer.LuceneIndexWriter;
+import org.apache.jackrabbit.oak.plugins.index.lucene.writer.LuceneIndexWriterFactory;
+import org.apache.jackrabbit.oak.spi.commit.CommitContext;
+import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
+import org.apache.lucene.index.IndexableField;
+
+public class LocalIndexWriterFactory implements LuceneIndexWriterFactory {
+    public static final String COMMIT_PROCESSED_BY_LOCAL_LUCENE_EDITOR = "commitProcessedByLocalLuceneEditor";
+    private final IndexingContext indexingContext;
+    private final CommitContext commitContext;
+    private final int inMemoryDocsLimit;
+
+    public LocalIndexWriterFactory(IndexingContext indexingContext, int inMemoryDocsLimit) {
+        this.indexingContext = indexingContext;
+        this.commitContext = getCommitContext(indexingContext);
+        this.inMemoryDocsLimit = inMemoryDocsLimit;
+    }
+
+    private LuceneDocumentHolder getDocumentHolder(){
+        LuceneDocumentHolder holder = (LuceneDocumentHolder) commitContext.get(LuceneDocumentHolder.NAME);
+        if (holder == null) {
+            //lazily initialize the holder
+            holder = new LuceneDocumentHolder();
+            commitContext.set(LuceneDocumentHolder.NAME, holder);
+        }
+        return holder;
+    }
+
+    private static CommitContext getCommitContext(IndexingContext indexingContext) {
+        CommitContext commitContext = (CommitContext) indexingContext.getCommitInfo().getInfo().get(CommitContext.NAME);
+        return Preconditions.checkNotNull(commitContext, "No commit context found in commit info");
+    }
+
+    @Override
+    public LuceneIndexWriter newInstance(IndexDefinition definition, NodeBuilder definitionBuilder, boolean reindex) {
+        return new LocalIndexWriter(definition);
+    }
+
+    private class LocalIndexWriter implements LuceneIndexWriter {
+        private final IndexDefinition definition;
+        private List<LuceneDoc> docList;
+
+        public LocalIndexWriter(IndexDefinition definition) {
+            this.definition = definition;
+        }
+
+        @Override
+        public void updateDocument(String path, Iterable<? extends IndexableField> doc) throws IOException {
+            addLuceneDoc(LuceneDoc.forUpdate(definition.getIndexPathFromConfig(), path, doc));
+        }
+
+        @Override
+        public void deleteDocuments(String path) throws IOException {
+            //Hybrid index logic drops the deletes. So no use to
+            //add them to the list
+            //addLuceneDoc(LuceneDoc.forDelete(definition.getIndexPathFromConfig(), path));
+        }
+
+        @Override
+        public boolean close(long timestamp) throws IOException {
+            //This is used by testcase
+            commitContext.set(COMMIT_PROCESSED_BY_LOCAL_LUCENE_EDITOR, Boolean.TRUE);
+            //always return false as nothing gets written to the index
+            return false;
+        }
+
+        private void addLuceneDoc(LuceneDoc luceneDoc) {
+            if (docList == null){
+                if (definition.isSyncIndexingEnabled()){
+                    docList = getDocumentHolder().getSyncIndexedDocList(indexingContext.getIndexPath());
+                } else if (definition.isNRTIndexingEnabled()){
+                    docList = getDocumentHolder().getNRTIndexedDocList(indexingContext.getIndexPath());
+                } else {
+                    throw new IllegalStateException("Should not be invoked for any other indexing " +
+                            "mode apart from 'sync' and 'nrt'");
+                }
+            }
+
+            if (definition.isNRTIndexingEnabled()
+                    && getDocumentHolder().checkLimitAndLogWarning(inMemoryDocsLimit)){
+               return;
+            }
+
+            docList.add(luceneDoc);
+        }
+    }
+}
diff --git a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/hybrid/LuceneDoc.java b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/hybrid/LuceneDoc.java
new file mode 100644
index 0000000..554bbc8
--- /dev/null
+++ b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/hybrid/LuceneDoc.java
@@ -0,0 +1,51 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.jackrabbit.oak.plugins.index.lucene.hybrid;
+
+import javax.annotation.Nullable;
+
+import org.apache.lucene.index.IndexableField;
+
+class LuceneDoc {
+    final String indexPath;
+    final String docPath;
+    final Iterable<? extends IndexableField> doc;
+    final boolean delete;
+
+    public static LuceneDoc forUpdate(String indexPath, String path, Iterable<? extends IndexableField> doc){
+        return new LuceneDoc(indexPath, path, doc, false);
+    }
+
+    public static LuceneDoc forDelete(String indexPath, String path){
+        return new LuceneDoc(indexPath, path, null, true);
+    }
+
+    private LuceneDoc(String indexPath, String path, @Nullable Iterable<? extends IndexableField> doc, boolean delete) {
+        this.docPath = path;
+        this.indexPath = indexPath;
+        this.doc = doc;
+        this.delete = delete;
+    }
+
+    @Override
+    public String toString() {
+        return String.format("%s(%s)", indexPath, docPath);
+    }
+}
diff --git a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/hybrid/LuceneDocumentHolder.java b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/hybrid/LuceneDocumentHolder.java
new file mode 100644
index 0000000..8421d79
--- /dev/null
+++ b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/hybrid/LuceneDocumentHolder.java
@@ -0,0 +1,66 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.jackrabbit.oak.plugins.index.lucene.hybrid;
+
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+
+import com.google.common.collect.ArrayListMultimap;
+import com.google.common.collect.ListMultimap;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+class LuceneDocumentHolder {
+    private static final Logger log = LoggerFactory.getLogger(LuceneDocumentHolder.class);
+    public static final String NAME = "oak.lucene.documentHolder";
+
+    private final ListMultimap<String, LuceneDoc> nrtIndexedList = ArrayListMultimap.create();
+    private final ListMultimap<String, LuceneDoc> syncIndexedList = ArrayListMultimap.create();
+    private boolean limitWarningLogged;
+
+    public List<LuceneDoc> getNRTIndexedDocList(String indexPath) {
+        return nrtIndexedList.get(indexPath);
+    }
+
+    public Iterable<LuceneDoc> getNRTIndexedDocs(){
+        return nrtIndexedList.values();
+    }
+
+    public List<LuceneDoc> getSyncIndexedDocList(String indexPath) {
+        return syncIndexedList.get(indexPath);
+    }
+
+    public Map<String, Collection<LuceneDoc>> getSyncIndexedDocs(){
+        return syncIndexedList.asMap();
+    }
+
+    public boolean checkLimitAndLogWarning(int maxSize){
+        if (nrtIndexedList.size() >= maxSize){
+            if (!limitWarningLogged){
+                log.warn("Number of in memory documents meant for hybrid indexing has " +
+                        "exceeded limit [{}]. Some documents would be dropped", maxSize);
+                limitWarningLogged = true;
+            }
+            return true;
+        }
+        return false;
+    }
+}
diff --git a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/hybrid/NRTIndex.java b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/hybrid/NRTIndex.java
new file mode 100644
index 0000000..5b6773c
--- /dev/null
+++ b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/hybrid/NRTIndex.java
@@ -0,0 +1,267 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.jackrabbit.oak.plugins.index.lucene.hybrid;
+
+import java.io.Closeable;
+import java.io.File;
+import java.io.IOException;
+import java.util.List;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import javax.annotation.CheckForNull;
+import javax.annotation.Nullable;
+
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.Lists;
+import org.apache.commons.io.FileUtils;
+import org.apache.jackrabbit.oak.plugins.index.lucene.IndexCopier;
+import org.apache.jackrabbit.oak.plugins.index.lucene.IndexDefinition;
+import org.apache.jackrabbit.oak.plugins.index.lucene.reader.LuceneIndexReader;
+import org.apache.jackrabbit.oak.plugins.index.lucene.writer.IndexWriterUtils;
+import org.apache.jackrabbit.oak.plugins.index.lucene.writer.LuceneIndexWriter;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.IndexableField;
+import org.apache.lucene.search.suggest.analyzing.AnalyzingInfixSuggester;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.FSDirectory;
+import org.apache.lucene.store.NRTCachingDirectory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+import static com.google.common.base.Preconditions.checkState;
+
+
+public class NRTIndex implements Closeable {
+    private static final AtomicInteger COUNTER = new AtomicInteger();
+    private static final Logger log = LoggerFactory.getLogger(NRTIndex.class);
+
+    /**
+     * Prefix used for naming the directory created for NRT indexes
+     */
+    public static final String NRT_DIR_PREFIX = "nrt-";
+
+    private final IndexDefinition definition;
+    private final IndexCopier indexCopier;
+    private final LuceneIndexReader previousReader;
+    private final IndexUpdateListener refreshPolicy;
+
+    private IndexWriter indexWriter;
+    private NRTIndexWriter nrtIndexWriter;
+    private File indexDir;
+    private Directory directory;
+    private DirectoryReader dirReader;
+    private boolean closed;
+    private List<LuceneIndexReader> readers;
+
+    public NRTIndex(IndexDefinition definition, IndexCopier indexCopier,
+                    IndexUpdateListener refreshPolicy, @Nullable NRTIndex previous) {
+        this.definition = definition;
+        this.indexCopier = indexCopier;
+        this.refreshPolicy = refreshPolicy;
+        this.previousReader = previous != null ? previous.getPrimaryReader() : null;
+    }
+
+    @CheckForNull
+    LuceneIndexReader getPrimaryReader() {
+        DirectoryReader reader = createReader();
+        return reader != null ? new NRTReader(reader) : null;
+    }
+
+    public LuceneIndexWriter getWriter() throws IOException {
+        checkState(!closed);
+        if (nrtIndexWriter == null) {
+            nrtIndexWriter = createWriter();
+        }
+        return nrtIndexWriter;
+    }
+
+    /**
+     * Returns the list of LuceneIndexReader. If the writer has not received
+     * any updates between 2 calls to this method then same list would be
+     * returned.
+     */
+    public synchronized List<LuceneIndexReader> getReaders() {
+        checkState(!closed);
+        DirectoryReader latestReader = createReader();
+        //reader not changed i.e. no change in index
+        //reuse old readers
+        if (latestReader == dirReader && readers != null){
+            return readers;
+        }
+        List<LuceneIndexReader> newReaders = Lists.newArrayListWithCapacity(2);
+        if (latestReader != null) {
+            newReaders.add(new NRTReader(latestReader));
+        }
+
+        //Old reader should be added later
+        if (previousReader != null) {
+            newReaders.add(previousReader);
+        }
+        dirReader = latestReader;
+        readers = ImmutableList.copyOf(newReaders);
+        return readers;
+    }
+
+    public ReaderRefreshPolicy getRefreshPolicy() {
+        return refreshPolicy;
+    }
+
+    public synchronized void close() throws IOException {
+        if (closed) {
+            return;
+        }
+        if (indexWriter != null) {
+            //TODO Close call can possibly be speeded up by
+            //avoiding merge and dropping stuff in memory. To be explored
+            //indexWrite.close(waitForMerges)
+            indexWriter.close();
+            directory.close();
+            FileUtils.deleteQuietly(indexDir);
+            log.debug("[{}] Removed directory [{}]", this, indexDir);
+        }
+
+        closed = true;
+    }
+
+    public boolean isClosed() {
+        return closed;
+    }
+
+    @Override
+    public String toString() {
+        return definition.getIndexPathFromConfig();
+    }
+
+    //For test
+    File getIndexDir() {
+        return indexDir;
+    }
+
+    /**
+     * If index was updated then a new reader would be returned otherwise
+     * existing reader would be returned
+     */
+    @CheckForNull
+    private synchronized DirectoryReader createReader() {
+        checkState(!closed);
+        //Its possible that readers are obtained
+        //before anything gets indexed
+        if (indexWriter == null) {
+            return null;
+        }
+        DirectoryReader result = dirReader;
+        try {
+            //applyDeletes is false as layers above would take care of
+            //stale result
+            if (dirReader == null) {
+                result = DirectoryReader.open(indexWriter, false);
+            } else {
+                DirectoryReader newReader = DirectoryReader.openIfChanged(dirReader, indexWriter, false);
+                if (newReader != null) {
+                    result = newReader;
+                }
+            }
+            return result;
+        } catch (IOException e) {
+            log.warn("Error opening index [{}]", e);
+        }
+        return null;
+    }
+
+    private synchronized NRTIndexWriter createWriter() throws IOException {
+        String dirName = generateDirName();
+        indexDir = indexCopier.getIndexDir(definition, definition.getIndexPathFromConfig(), dirName);
+        Directory fsdir = FSDirectory.open(indexDir);
+        //TODO make these configurable
+        directory = new NRTCachingDirectory(fsdir, 1, 1);
+        IndexWriterConfig config = IndexWriterUtils.getIndexWriterConfig(definition, false);
+
+        //TODO Explore following for optimizing indexing speed
+        //config.setUseCompoundFile(false);
+        //config.setRAMBufferSizeMB(1024*1024*25);
+
+        indexWriter = new IndexWriter(directory, config);
+        return new NRTIndexWriter(indexWriter);
+    }
+
+    public static String generateDirName() {
+        long uniqueCount = System.currentTimeMillis() + COUNTER.incrementAndGet();
+        return NRT_DIR_PREFIX + uniqueCount;
+    }
+
+    private static class NRTReader implements LuceneIndexReader {
+        private final IndexReader indexReader;
+
+        public NRTReader(IndexReader indexReader) {
+            this.indexReader = checkNotNull(indexReader);
+        }
+
+        @Override
+        public IndexReader getReader() {
+            return indexReader;
+        }
+
+        @Override
+        public AnalyzingInfixSuggester getLookup() {
+            return null;
+        }
+
+        @Override
+        public Directory getSuggestDirectory() {
+            return null;
+        }
+
+        @Override
+        public void close() throws IOException {
+
+        }
+    }
+
+    private class NRTIndexWriter implements LuceneIndexWriter {
+        private final IndexWriter indexWriter;
+
+        public NRTIndexWriter(IndexWriter indexWriter) {
+            this.indexWriter = indexWriter;
+        }
+
+        @Override
+        public void updateDocument(String path, Iterable<? extends IndexableField> doc) throws IOException {
+            //For NRT case documents are never updated
+            //instead they are just added. This would cause duplicates
+            //That should be taken care at query side via unique cursor
+            indexWriter.addDocument(doc);
+            refreshPolicy.updated();
+        }
+
+        @Override
+        public void deleteDocuments(String path) throws IOException {
+            //Do not delete documents. Query side would handle it
+        }
+
+        @Override
+        public boolean close(long timestamp) throws IOException {
+            throw new IllegalStateException("Close should not be called");
+        }
+    }
+}
diff --git a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/hybrid/NRTIndexFactory.java b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/hybrid/NRTIndexFactory.java
new file mode 100644
index 0000000..eac06d2
--- /dev/null
+++ b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/hybrid/NRTIndexFactory.java
@@ -0,0 +1,112 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.jackrabbit.oak.plugins.index.lucene.hybrid;
+
+import java.io.Closeable;
+import java.io.IOException;
+import java.util.List;
+import java.util.concurrent.TimeUnit;
+
+import javax.annotation.CheckForNull;
+
+import com.google.common.collect.LinkedListMultimap;
+import com.google.common.collect.ListMultimap;
+import org.apache.jackrabbit.oak.plugins.index.lucene.IndexCopier;
+import org.apache.jackrabbit.oak.plugins.index.lucene.IndexDefinition;
+import org.apache.jackrabbit.oak.stats.Clock;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import static com.google.common.base.Preconditions.checkArgument;
+import static com.google.common.base.Preconditions.checkNotNull;
+
+public class NRTIndexFactory implements Closeable{
+    private final Logger log = LoggerFactory.getLogger(getClass());
+    private final ListMultimap<String, NRTIndex> indexes = LinkedListMultimap.create();
+    private final IndexCopier indexCopier;
+    private final Clock clock;
+    private final long refreshDeltaInSecs;
+
+    public NRTIndexFactory(IndexCopier indexCopier) {
+        this(indexCopier, Clock.SIMPLE, 1);
+    }
+
+    public NRTIndexFactory(IndexCopier indexCopier, Clock clock, long refreshDeltaInSecs) {
+        this.indexCopier = checkNotNull(indexCopier);
+        this.clock = clock;
+        this.refreshDeltaInSecs = refreshDeltaInSecs;
+    }
+
+    //This would not be invoked concurrently
+    // but still mark it synchronized for safety
+    @CheckForNull
+    public synchronized NRTIndex createIndex(IndexDefinition definition) {
+        if (!(definition.isNRTIndexingEnabled() || definition.isSyncIndexingEnabled())){
+            return null;
+        }
+        String indexPath = definition.getIndexPathFromConfig();
+        NRTIndex current = new NRTIndex(definition, indexCopier, getRefreshPolicy(definition), getPrevious(indexPath));
+        indexes.put(indexPath, current);
+        closeLast(indexPath);
+        return current;
+    }
+
+    @Override
+    public void close() throws IOException {
+        for (NRTIndex index : indexes.values()){
+            index.close();
+        }
+        indexes.clear();
+    }
+
+    List<NRTIndex> getIndexes(String path){
+        return indexes.get(path);
+    }
+
+    private void closeLast(String indexPath) {
+        List<NRTIndex> existing = indexes.get(indexPath);
+        if (existing.size() < 3){
+            return;
+        }
+        NRTIndex oldest = existing.remove(0);
+        try {
+            oldest.close();
+        } catch (IOException e) {
+            log.warn("Error occurred while closing index [{}]", oldest, e);
+        }
+    }
+
+    private NRTIndex getPrevious(String indexPath) {
+        List<NRTIndex> existing = indexes.get(indexPath);
+        if (existing.isEmpty()){
+            return null;
+        }
+        checkArgument(existing.size() <= 2, "Found [%s] more than 3 index", existing.size());
+        return existing.get(existing.size() - 1);
+    }
+
+    private IndexUpdateListener getRefreshPolicy(IndexDefinition definition) {
+        if (definition.isSyncIndexingEnabled()){
+            return new RefreshOnWritePolicy();
+            //return new RefreshOnReadPolicy(clock, TimeUnit.SECONDS, refreshDeltaInSecs);
+        }
+        return new TimedRefreshPolicy(clock, TimeUnit.SECONDS, refreshDeltaInSecs);
+    }
+}
diff --git a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/hybrid/ReaderRefreshPolicy.java b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/hybrid/ReaderRefreshPolicy.java
new file mode 100644
index 0000000..d4574af
--- /dev/null
+++ b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/hybrid/ReaderRefreshPolicy.java
@@ -0,0 +1,56 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.jackrabbit.oak.plugins.index.lucene.hybrid;
+
/**
 * Strategy controlling when the index readers backing queries should be
 * reopened to pick up recent writes.
 */
public interface ReaderRefreshPolicy {
    /** Policy which never triggers a refresh. */
    ReaderRefreshPolicy NEVER = new ReaderRefreshPolicy() {
        @Override
        public void refreshOnReadIfRequired(Runnable refreshCallback) {
            //No refresh, ever
        }

        @Override
        public void refreshOnWriteIfRequired(Runnable refreshCallback) {
            //No refresh, ever
        }
    };

    /**
     * Invoked before any query is performed, giving the policy a chance
     * to refresh the readers.
     *
     * <p>The index may or may not have been updated when this method
     * is invoked.
     *
     * @param refreshCallback callback which performs the actual reader refresh
     */
    void refreshOnReadIfRequired(Runnable refreshCallback);

    /**
     * Invoked after writes have been performed, as the final step, to
     * request a refresh.
     *
     * <p>Whenever this is invoked it can be assumed that the index has
     * been updated.
     *
     * @param refreshCallback callback which performs the actual reader refresh
     */
    void refreshOnWriteIfRequired(Runnable refreshCallback);
}
diff --git a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/hybrid/RefreshOnReadPolicy.java b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/hybrid/RefreshOnReadPolicy.java
new file mode 100644
index 0000000..d44e3cc
--- /dev/null
+++ b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/hybrid/RefreshOnReadPolicy.java
@@ -0,0 +1,95 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.jackrabbit.oak.plugins.index.lucene.hybrid;
+
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+import org.apache.jackrabbit.oak.stats.Clock;
+
/**
 * This policy ensures that any writes that have been done to the index are made
 * visible *before* any read is performed. It's meant as an alternative to
 * {@link RefreshOnWritePolicy} and for "sync" indexes. For "nrt" indexes
 * {@link TimedRefreshPolicy} should be preferred.
 *
 * <p>The readers are not refreshed immediately upon write. Instead they would be refreshed if
 *
 * <ul>
 *     <li>Upon write if refreshDelta time has elapsed then readers would be refreshed</li>
 *     <li>Upon read if index is found to be updated then again readers would be refreshed</li>
 * </ul>
 *
 * <p>This policy can result in some contention if the index is being frequently
 * updated and queried, since reads and writes then race for {@code lock}.
 *
 * *This is an experimental policy. Currently it causes high contention*
 */
public class RefreshOnReadPolicy implements ReaderRefreshPolicy, IndexUpdateListener {
    //Set (under lock) when a write occurred within the refresh window;
    //cleared (under lock) once a refresh has run
    private final AtomicBoolean dirty = new AtomicBoolean();
    //Guards the dirty-flag/refresh sequencing between readers and writers
    private final Object lock = new Object();
    private final Clock clock;
    //Minimum interval between two write-triggered refreshes, in millis
    private final long refreshDelta;
    //volatile: read outside the lock in refreshOnWriteIfRequired
    private volatile long lastRefreshTime;

    /**
     * @param clock clock used to measure the refresh interval
     * @param unit unit of {@code refreshDelta}
     * @param refreshDelta minimum time between write-triggered refreshes
     */
    public RefreshOnReadPolicy(Clock clock, TimeUnit unit, long refreshDelta) {
        this.clock = clock;
        this.refreshDelta = unit.toMillis(refreshDelta);
    }

    @Override
    public void refreshOnReadIfRequired(Runnable refreshCallback) {
        //Cheap unlocked pre-check; refreshWithLock re-checks under the lock
        if (dirty.get()){
            refreshWithLock(refreshCallback, false);
        }
    }

    @Override
    public void refreshOnWriteIfRequired(Runnable refreshCallback) {
        long currentTime = clock.getTime();
        if (currentTime - lastRefreshTime > refreshDelta) {
            //Do not set dirty instead directly refresh
            refreshWithLock(refreshCallback, true);
        } else {
            synchronized (lock){
                //Needs to be done in a lock otherwise
                //refreshWithLock would override this
                dirty.set(true);
            }
        }
    }

    @Override
    public void updated() {
        //Detect dirty based on call from refreshOnWriteIfRequired
        //as that would *always* be called if the index has been updated
        //And ensures that it gets calls after all changes for that index
        //for that transaction got committed
    }

    //Runs the refresh under the lock; forceRefresh bypasses the dirty check
    //(used on the write path once refreshDelta has elapsed)
    private void refreshWithLock(Runnable refreshCallback, boolean forceRefresh) {
        synchronized (lock){
            if (dirty.get() || forceRefresh) {
                refreshCallback.run();
                dirty.set(false);
                lastRefreshTime = clock.getTime();
            }
        }
    }
}
diff --git a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/hybrid/RefreshOnWritePolicy.java b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/hybrid/RefreshOnWritePolicy.java
new file mode 100644
index 0000000..0661acd
--- /dev/null
+++ b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/hybrid/RefreshOnWritePolicy.java
@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.jackrabbit.oak.plugins.index.lucene.hybrid;
+
+import java.util.concurrent.atomic.AtomicBoolean;
+
+/**
+ * Policy which performs immediate refresh upon completion of writes
+ */
+public class RefreshOnWritePolicy implements ReaderRefreshPolicy, IndexUpdateListener {
+    private final AtomicBoolean dirty = new AtomicBoolean();
+
+    @Override
+    public void refreshOnReadIfRequired(Runnable refreshCallback) {
+        //As writer itself refreshes the index. No refresh done
+        //on read
+    }
+
+    @Override
+    public void refreshOnWriteIfRequired(Runnable refreshCallback) {
+        //For sync indexing mode we refresh the reader immediately
+        //on the writer thread. So that any read call later sees upto date index
+        if (dirty.get()) {
+            refreshCallback.run();
+            dirty.set(false);
+        }
+    }
+
+    @Override
+    public void updated() {
+        dirty.set(true);
+    }
+}
diff --git a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/hybrid/TimedRefreshPolicy.java b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/hybrid/TimedRefreshPolicy.java
new file mode 100644
index 0000000..6860836
--- /dev/null
+++ b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/hybrid/TimedRefreshPolicy.java
@@ -0,0 +1,63 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.jackrabbit.oak.plugins.index.lucene.hybrid;
+
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+import org.apache.jackrabbit.oak.stats.Clock;
+
/**
 * Policy which refreshes the readers at most once per configured time
 * interval, and only if the index has been updated since the last refresh.
 * Both read- and write-triggered refresh requests are throttled identically.
 *
 * <p>Thread-safety: the dirty flag is cleared via compareAndSet so that of
 * several concurrent callers at most one performs the refresh.
 */
public class TimedRefreshPolicy implements ReaderRefreshPolicy, IndexUpdateListener {
    //Set by updated(); cleared by the caller winning the CAS in refreshIfRequired
    private final AtomicBoolean dirty = new AtomicBoolean();
    private final Clock clock;
    //Minimum interval between two refreshes, in millis
    private final long refreshDelta;
    //volatile: read without a lock in refreshIfRequired; initial value 0
    //means the very first dirty refresh always proceeds
    private volatile long lastRefreshTime;

    /**
     * @param clock clock used to measure the refresh interval
     * @param unit unit of {@code refreshDelta}
     * @param refreshDelta minimum time between two refreshes
     */
    public TimedRefreshPolicy(Clock clock, TimeUnit unit, long refreshDelta) {
        this.clock = clock;
        this.refreshDelta = unit.toMillis(refreshDelta);
    }

    @Override
    public void refreshOnReadIfRequired(Runnable refreshCallback) {
        refreshIfRequired(refreshCallback);
    }

    @Override
    public void refreshOnWriteIfRequired(Runnable refreshCallback) {
        refreshIfRequired(refreshCallback);
    }

    @Override
    public void updated() {
        dirty.set(true);
    }

    //Refresh only when dirty AND the interval has elapsed; the CAS ensures
    //a single winner among concurrent callers runs the callback
    private void refreshIfRequired(Runnable refreshCallback) {
        if (dirty.get()){
            long currentTime = clock.getTime();
            if (currentTime - lastRefreshTime > refreshDelta
                    && dirty.compareAndSet(true, false)){
                lastRefreshTime = currentTime;
                refreshCallback.run();
            }
        }
    }
}
diff --git a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/util/IndexDefinitionBuilder.java b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/util/IndexDefinitionBuilder.java
index 5fd6f41..268a942 100644
--- a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/util/IndexDefinitionBuilder.java
+++ b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/util/IndexDefinitionBuilder.java
@@ -37,6 +37,7 @@
 import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
 import org.apache.jackrabbit.oak.spi.state.NodeState;
 
+import static com.google.common.base.Preconditions.checkNotNull;
 import static java.util.Arrays.asList;
 import static org.apache.jackrabbit.JcrConstants.JCR_PRIMARYTYPE;
 import static org.apache.jackrabbit.JcrConstants.NT_UNSTRUCTURED;
@@ -75,6 +76,17 @@ public IndexDefinitionBuilder excludedPaths(String ... paths){
         return this;
     }
 
    /**
     * Sets the codec to be used by the index.
     *
     * @param codecName name of the codec; must not be null
     * @return this builder for chaining
     */
    public IndexDefinitionBuilder codec(String codecName){
        tree.setProperty(LuceneIndexConstants.CODEC_NAME, checkNotNull(codecName));
        return this;
    }
+
    /**
     * Sets the "async" property to the given values, replacing any
     * previously set value.
     *
     * @param asyncVals values for the multi-valued "async" property
     * @return this builder for chaining
     */
    public IndexDefinitionBuilder async(String ... asyncVals){
        //Remove first — presumably so an existing property of a different
        //type/cardinality is fully replaced; confirm whether setProperty
        //alone would suffice
        tree.removeProperty("async");
        tree.setProperty("async", asList(asyncVals), STRINGS);
        return this;
    }
+
     public NodeState build(){
         return builder.getNodeState();
     }
diff --git a/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/IndexDefinitionTest.java b/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/IndexDefinitionTest.java
index e4b5b37..714377a 100644
--- a/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/IndexDefinitionTest.java
+++ b/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/IndexDefinitionTest.java
@@ -29,6 +29,7 @@
 import org.apache.jackrabbit.oak.api.Type;
 import org.apache.jackrabbit.oak.plugins.index.IndexConstants;
 import org.apache.jackrabbit.oak.plugins.index.lucene.IndexDefinition.IndexingRule;
+import org.apache.jackrabbit.oak.plugins.index.lucene.LuceneIndexConstants.IndexingMode;
 import org.apache.jackrabbit.oak.plugins.index.lucene.util.TokenizerChain;
 import org.apache.jackrabbit.oak.plugins.tree.TreeFactory;
 import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
@@ -77,6 +78,7 @@
     public void defaultConfig() throws Exception{
         IndexDefinition idxDefn = new IndexDefinition(root, builder.getNodeState());
         assertTrue(idxDefn.saveDirListing());
+        assertFalse(idxDefn.isNRTIndexingEnabled());
     }
 
     @Test
@@ -866,6 +868,21 @@ public void uniqueId() throws Exception{
 
     }
 
    @Test
    public void nrt() throws Exception{
        //Enabling "nrt" indexing mode must be reflected by the definition
        TestUtil.enableIndexingMode(builder, IndexingMode.NRT);
        IndexDefinition idxDefn = new IndexDefinition(root, builder.getNodeState());
        assertTrue(idxDefn.isNRTIndexingEnabled());
    }
+
    @Test
    public void sync() throws Exception{
        //"sync" mode enables sync indexing but is distinct from NRT mode
        TestUtil.enableIndexingMode(builder, IndexingMode.SYNC);
        IndexDefinition idxDefn = new IndexDefinition(root, builder.getNodeState());
        assertFalse(idxDefn.isNRTIndexingEnabled());
        assertTrue(idxDefn.isSyncIndexingEnabled());
    }
+
     //TODO indexesAllNodesOfMatchingType - with nullCheckEnabled
 
     private static IndexingRule getRule(IndexDefinition defn, String typeName){
diff --git a/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/IndexPlannerTest.java b/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/IndexPlannerTest.java
index 6b7ea71..f1db1fb 100644
--- a/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/IndexPlannerTest.java
+++ b/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/IndexPlannerTest.java
@@ -768,11 +768,11 @@ private IndexNode createSuggestionOrSpellcheckIndex(String nodeType,
     //------ END - Suggestion/spellcheck plan tests
 
     private IndexNode createIndexNode(IndexDefinition defn, long numOfDocs) throws IOException {
-        return new IndexNode("foo", defn, new TestReaderFactory(createSampleDirectory(numOfDocs)).createReaders(defn, EMPTY_NODE, "foo"));
+        return new IndexNode("foo", defn, new TestReaderFactory(createSampleDirectory(numOfDocs)).createReaders(defn, EMPTY_NODE, "foo"), null);
     }
 
     private IndexNode createIndexNode(IndexDefinition defn) throws IOException {
-        return new IndexNode("foo", defn, new TestReaderFactory(createSampleDirectory()).createReaders(defn, EMPTY_NODE, "foo"));
+        return new IndexNode("foo", defn, new TestReaderFactory(createSampleDirectory()).createReaders(defn, EMPTY_NODE, "foo"), null);
     }
 
     private FilterImpl createFilter(String nodeTypeName) {
diff --git a/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/LuceneIndexEditorProviderTest.java b/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/LuceneIndexEditorProviderTest.java
new file mode 100644
index 0000000..099d335
--- /dev/null
+++ b/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/LuceneIndexEditorProviderTest.java
@@ -0,0 +1,179 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.jackrabbit.oak.plugins.index.lucene;
+
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.ImmutableSet;
+import org.apache.commons.lang3.reflect.FieldUtils;
+import org.apache.jackrabbit.oak.api.CommitFailedException;
+import org.apache.jackrabbit.oak.core.SimpleCommitContext;
+import org.apache.jackrabbit.oak.plugins.index.ContextAwareCallback;
+import org.apache.jackrabbit.oak.plugins.index.IndexConstants;
+import org.apache.jackrabbit.oak.plugins.index.IndexUpdateCallback;
+import org.apache.jackrabbit.oak.plugins.index.IndexingContext;
+import org.apache.jackrabbit.oak.plugins.index.lucene.LuceneIndexConstants.IndexingMode;
+import org.apache.jackrabbit.oak.spi.commit.CommitContext;
+import org.apache.jackrabbit.oak.spi.commit.CommitInfo;
+import org.apache.jackrabbit.oak.spi.commit.Editor;
+import org.apache.jackrabbit.oak.spi.mount.Mounts;
+import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
+import org.apache.jackrabbit.oak.spi.state.NodeState;
+import org.junit.Test;
+
+import static com.google.common.collect.ImmutableSet.of;
+import static org.apache.jackrabbit.oak.api.Type.STRINGS;
+import static org.apache.jackrabbit.oak.plugins.index.lucene.LuceneIndexConstants.TYPE_LUCENE;
+import static org.apache.jackrabbit.oak.plugins.index.lucene.util.LuceneIndexHelper.newLucenePropertyIndexDefinition;
+import static org.apache.jackrabbit.oak.plugins.memory.PropertyStates.createProperty;
+import static org.apache.jackrabbit.oak.plugins.nodetype.write.InitialContent.INITIAL_CONTENT;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.fail;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+public class LuceneIndexEditorProviderTest {
+    private NodeState root = INITIAL_CONTENT;
+    private NodeBuilder builder = root.builder();
+
+    @Test
+    public void readOnlyBuilderUsedForSync() throws Exception {
+        LuceneIndexEditorProvider editorProvider = new LuceneIndexEditorProvider(null,
+                null,
+                null,
+                null,
+                Mounts.defaultMountInfoProvider());
+
+        IndexUpdateCallback callback = new TestCallback("/oak:index/fooIndex", newCommitInfo(), false, false);
+        NodeBuilder defnBuilder = createIndexDefinition("fooIndex").builder();
+        Editor editor = editorProvider.getIndexEditor(TYPE_LUCENE, defnBuilder, root, callback);
+        LuceneIndexEditor luceneEditor = (LuceneIndexEditor) editor;
+
+        NodeBuilder builderFromContext =
+                (NodeBuilder) FieldUtils.readField(luceneEditor.getContext(), "definitionBuilder", true);
+
+        try {
+            builderFromContext.setProperty("foo", "bar");
+            fail("Should have been read only builder");
+        } catch (UnsupportedOperationException ignore) {
+
+        }
+    }
+
+    @Test
+    public void reuseOldIndexDefinition() throws Exception{
+        IndexTracker tracker = mock(IndexTracker.class);
+        LuceneIndexEditorProvider editorProvider = new LuceneIndexEditorProvider(null,
+                tracker,
+                null,
+                null,
+                Mounts.defaultMountInfoProvider());
+
+        //Set up a different IndexDefinition which needs to be returned
+        //from tracker with a marker property
+        NodeBuilder testBuilder = createIndexDefinition("fooIndex").builder();
+        testBuilder.setProperty("foo", "bar");
+        IndexDefinition defn = new IndexDefinition(root, testBuilder.getNodeState());
+        when(tracker.getIndexDefinition("/oak:index/fooIndex")).thenReturn(defn);
+
+        IndexUpdateCallback callback = new TestCallback("/oak:index/fooIndex", newCommitInfo(), false, false);
+        NodeBuilder defnBuilder = createIndexDefinition("fooIndex").builder();
+        Editor editor = editorProvider.getIndexEditor(TYPE_LUCENE, defnBuilder, root, callback);
+        LuceneIndexEditor luceneEditor = (LuceneIndexEditor) editor;
+        LuceneIndexEditorContext context = luceneEditor.getContext();
+
+        //Definition should reflect the marker property
+        assertEquals("bar", context.getDefinition().getDefinitionNodeState().getString("foo"));
+    }
+
+    @Test
+    public void editorNullInCaseOfReindex() throws Exception{
+        LuceneIndexEditorProvider editorProvider = new LuceneIndexEditorProvider(null,
+                null,
+                null,
+                null,
+                Mounts.defaultMountInfoProvider());
+
+        IndexUpdateCallback callback = new TestCallback("/oak:index/fooIndex", newCommitInfo(), true, false);
+        NodeBuilder defnBuilder = createIndexDefinition("fooIndex").builder();
+        Editor editor = editorProvider.getIndexEditor(TYPE_LUCENE, defnBuilder, root, callback);
+        assertNull(editor);
+    }
+
+    private NodeState createIndexDefinition(String idxName) {
+        NodeBuilder idx = newLucenePropertyIndexDefinition(builder.child("oak:index"),
+                idxName, ImmutableSet.of("foo"), "async");
+        TestUtil.enableIndexingMode(idx, IndexingMode.NRT);
+        LuceneIndexEditorContext.configureUniqueId(idx);
+        IndexDefinition.updateDefinition(idx);
+        return idx.getNodeState();
+    }
+
+    private CommitInfo newCommitInfo() {
+        CommitInfo info = new CommitInfo("admin", "s1",
+                ImmutableMap.<String, Object>of(CommitContext.NAME, new SimpleCommitContext()));
+        return info;
+    }
+
+    private static class TestCallback implements IndexUpdateCallback, IndexingContext, ContextAwareCallback {
+        private final String indexPath;
+        private final CommitInfo commitInfo;
+        private final boolean reindexing;
+        private final boolean async;
+
+        private TestCallback(String indexPath, CommitInfo commitInfo, boolean reindexing, boolean async) {
+            this.indexPath = indexPath;
+            this.commitInfo = commitInfo;
+            this.reindexing = reindexing;
+            this.async = async;
+        }
+
+        @Override
+        public String getIndexPath() {
+            return indexPath;
+        }
+
+        @Override
+        public CommitInfo getCommitInfo() {
+            return commitInfo;
+        }
+
+        @Override
+        public boolean isReindexing() {
+            return reindexing;
+        }
+
+        @Override
+        public boolean isAsync() {
+            return async;
+        }
+
+        @Override
+        public void indexUpdate() throws CommitFailedException {
+
+        }
+
+        @Override
+        public IndexingContext getIndexingContext() {
+            return this;
+        }
+    }
+
+}
\ No newline at end of file
diff --git a/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/LuceneIndexProviderServiceTest.java b/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/LuceneIndexProviderServiceTest.java
index 8efe533..0a00f72 100644
--- a/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/LuceneIndexProviderServiceTest.java
+++ b/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/LuceneIndexProviderServiceTest.java
@@ -32,6 +32,7 @@
 import java.util.Map;
 
 import org.apache.commons.io.FileUtils;
+import org.apache.commons.lang3.reflect.FieldUtils;
 import org.apache.jackrabbit.oak.api.Blob;
 import org.apache.jackrabbit.oak.api.jmx.CacheStatsMBean;
 import org.apache.jackrabbit.oak.plugins.index.IndexEditorProvider;
@@ -43,6 +44,7 @@
 import org.apache.jackrabbit.oak.spi.mount.MountInfoProvider;
 import org.apache.jackrabbit.oak.spi.mount.Mounts;
 import org.apache.jackrabbit.oak.spi.query.QueryIndexProvider;
+import org.apache.jackrabbit.oak.stats.StatisticsProvider;
 import org.apache.lucene.search.BooleanQuery;
 import org.apache.lucene.util.InfoStream;
 import org.apache.sling.testing.mock.osgi.MockOsgi;
@@ -70,6 +72,7 @@
     @Before
     public void setUp(){
         context.registerService(MountInfoProvider.class, Mounts.defaultMountInfoProvider());
+        context.registerService(StatisticsProvider.class, StatisticsProvider.NOOP);
         context.registerService(ScorerProviderFactory.class, ScorerProviderFactory.DEFAULT);
         context.registerService(IndexAugmentorFactory.class, mock(IndexAugmentorFactory.class));
         MockOsgi.injectServices(service, context.bundleContext());
@@ -99,6 +102,8 @@ public void defaultSetup() throws Exception{
 
         assertEquals(1024, BooleanQuery.getMaxClauseCount());
 
+        assertNotNull(FieldUtils.readDeclaredField(service, "documentQueue", true));
+
         MockOsgi.deactivate(service);
     }
 
diff --git a/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/LucenePropertyIndexTest.java b/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/LucenePropertyIndexTest.java
index 8bb33a8..c6faa83 100644
--- a/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/LucenePropertyIndexTest.java
+++ b/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/LucenePropertyIndexTest.java
@@ -242,14 +242,7 @@ public void fulltextSearchWithCustomAnalyzer() throws Exception {
     }
 
     private Tree createFulltextIndex(Tree index, String name) throws CommitFailedException {
-        Tree def = index.addChild(INDEX_DEFINITIONS_NAME).addChild(name);
-        def.setProperty(JcrConstants.JCR_PRIMARYTYPE,
-                INDEX_DEFINITIONS_NODE_TYPE, Type.NAME);
-        def.setProperty(TYPE_PROPERTY_NAME, LuceneIndexConstants.TYPE_LUCENE);
-        def.setProperty(REINDEX_PROPERTY_NAME, true);
-        def.setProperty(createProperty(LuceneIndexConstants.INCLUDE_PROPERTY_TYPES,
-                of(PropertyType.TYPENAME_STRING, PropertyType.TYPENAME_BINARY), STRINGS));
-        return index.getChild(INDEX_DEFINITIONS_NAME).getChild(name);
+        return TestUtil.createFulltextIndex(index, name);
     }
 
     @Test
@@ -2369,12 +2362,7 @@ private Tree createFileNode(Tree tree, String name, String content, String mimeT
     }
 
     private Tree createFileNode(Tree tree, String name, Blob content, String mimeType){
-        Tree fileNode = tree.addChild(name);
-        fileNode.setProperty(JcrConstants.JCR_PRIMARYTYPE, JcrConstants.NT_FILE, Type.NAME);
-        Tree jcrContent = fileNode.addChild(JCR_CONTENT);
-        jcrContent.setProperty(JcrConstants.JCR_DATA, content);
-        jcrContent.setProperty(JcrConstants.JCR_MIMETYPE, mimeType);
-        return jcrContent;
+        return TestUtil.createFileNode(tree, name, content, mimeType);
     }
 
     private Tree usc(Tree parent, String childName){
@@ -2422,7 +2410,7 @@ private Tree createIndex(String name, Set<String> propNames) throws CommitFailed
         return createIndex(index, name, propNames);
     }
 
-    static Tree createIndex(Tree index, String name, Set<String> propNames) throws CommitFailedException {
+    public static Tree createIndex(Tree index, String name, Set<String> propNames) throws CommitFailedException {
         Tree def = index.addChild(INDEX_DEFINITIONS_NAME).addChild(name);
         def.setProperty(JcrConstants.JCR_PRIMARYTYPE,
                 INDEX_DEFINITIONS_NODE_TYPE, Type.NAME);
diff --git a/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/MultiplexingLucenePropertyIndexTest.java b/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/MultiplexingLucenePropertyIndexTest.java
index 041c480..37cbb2f 100644
--- a/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/MultiplexingLucenePropertyIndexTest.java
+++ b/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/MultiplexingLucenePropertyIndexTest.java
@@ -140,7 +140,7 @@ public void numDocsIsSumOfAllReaders() throws Exception{
         LuceneIndexReaderFactory readerFactory = new DefaultIndexReaderFactory(mip, null);
         List<LuceneIndexReader> readers = readerFactory.createReaders(defn, builder.getNodeState(),"/foo");
 
-        IndexNode node = new IndexNode("foo", defn, readers);
+        IndexNode node = new IndexNode("foo", defn, readers, null);
 
         //3 Obtain the plan
         FilterImpl filter = createFilter("nt:base");
diff --git a/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/TestUtil.java b/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/TestUtil.java
index c73ac74..7a974d9 100644
--- a/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/TestUtil.java
+++ b/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/TestUtil.java
@@ -25,15 +25,21 @@
 
 import javax.annotation.Nonnull;
 import javax.annotation.Nullable;
+import javax.jcr.PropertyType;
 import javax.jcr.Repository;
 
 import org.apache.commons.io.IOUtils;
 import org.apache.jackrabbit.JcrConstants;
 import org.apache.jackrabbit.api.JackrabbitRepository;
+import org.apache.jackrabbit.oak.api.Blob;
+import org.apache.jackrabbit.oak.api.CommitFailedException;
+import org.apache.jackrabbit.oak.api.PropertyState;
 import org.apache.jackrabbit.oak.api.Root;
 import org.apache.jackrabbit.oak.api.Tree;
 import org.apache.jackrabbit.oak.api.Type;
 import org.apache.jackrabbit.oak.commons.PathUtils;
+import org.apache.jackrabbit.oak.plugins.index.IndexConstants;
+import org.apache.jackrabbit.oak.plugins.index.lucene.LuceneIndexConstants.IndexingMode;
 import org.apache.jackrabbit.oak.plugins.index.lucene.util.LuceneIndexHelper;
 import org.apache.jackrabbit.oak.plugins.memory.MemoryNodeStore;
 import org.apache.jackrabbit.oak.plugins.memory.ModifiedNodeState;
@@ -49,6 +55,14 @@
 import org.apache.jackrabbit.oak.spi.state.NodeStore;
 
 import static com.google.common.base.Preconditions.checkNotNull;
+import static com.google.common.collect.ImmutableSet.of;
+import static org.apache.jackrabbit.JcrConstants.JCR_CONTENT;
+import static org.apache.jackrabbit.oak.api.Type.STRINGS;
+import static org.apache.jackrabbit.oak.plugins.index.IndexConstants.INDEX_DEFINITIONS_NAME;
+import static org.apache.jackrabbit.oak.plugins.index.IndexConstants.INDEX_DEFINITIONS_NODE_TYPE;
+import static org.apache.jackrabbit.oak.plugins.index.IndexConstants.REINDEX_PROPERTY_NAME;
+import static org.apache.jackrabbit.oak.plugins.index.IndexConstants.TYPE_PROPERTY_NAME;
+import static org.apache.jackrabbit.oak.plugins.memory.PropertyStates.createProperty;
 
 public class TestUtil {
     private static final AtomicInteger COUNTER = new AtomicInteger();
@@ -183,9 +197,55 @@ public static NodeBuilder createNodeWithType(NodeBuilder builder, String nodeNam
         return builder;
     }
 
+    public static Tree createFileNode(Tree tree, String name, Blob content, String mimeType){
+        Tree fileNode = tree.addChild(name);
+        fileNode.setProperty(JcrConstants.JCR_PRIMARYTYPE, JcrConstants.NT_FILE, Type.NAME);
+        Tree jcrContent = fileNode.addChild(JCR_CONTENT);
+        jcrContent.setProperty(JcrConstants.JCR_DATA, content);
+        jcrContent.setProperty(JcrConstants.JCR_MIMETYPE, mimeType);
+        return jcrContent;
+    }
+
+    public static Tree createFulltextIndex(Tree index, String name) throws CommitFailedException {
+        Tree def = index.addChild(INDEX_DEFINITIONS_NAME).addChild(name);
+        def.setProperty(JcrConstants.JCR_PRIMARYTYPE,
+                INDEX_DEFINITIONS_NODE_TYPE, Type.NAME);
+        def.setProperty(TYPE_PROPERTY_NAME, LuceneIndexConstants.TYPE_LUCENE);
+        def.setProperty(REINDEX_PROPERTY_NAME, true);
+        def.setProperty(createProperty(LuceneIndexConstants.INCLUDE_PROPERTY_TYPES,
+                of(PropertyType.TYPENAME_STRING, PropertyType.TYPENAME_BINARY), STRINGS));
+        return index.getChild(INDEX_DEFINITIONS_NAME).getChild(name);
+    }
+
     public static void shutdown(Repository repository) {
         if (repository instanceof JackrabbitRepository) {
             ((JackrabbitRepository) repository).shutdown();
         }
     }
+
+    public static NodeBuilder enableIndexingMode(NodeBuilder builder, IndexingMode indexingMode){
+        builder.setProperty(createAsyncProperty(indexingMode));
+        return builder;
+    }
+
+    public static Tree enableIndexingMode(Tree tree, IndexingMode indexingMode){
+        tree.setProperty(createAsyncProperty(indexingMode));
+        return tree;
+    }
+
+    private static PropertyState createAsyncProperty(String indexingMode) {
+        return createProperty(IndexConstants.ASYNC_PROPERTY_NAME, of(indexingMode , "async"), STRINGS);
+    }
+
+    private static PropertyState createAsyncProperty(IndexingMode indexingMode) {
+        switch(indexingMode) {
+            case NRT  :
+            case SYNC :
+                return createAsyncProperty(indexingMode.asyncValueName());
+            case ASYNC:
+                return createProperty(IndexConstants.ASYNC_PROPERTY_NAME, of("async"), STRINGS);
+            default:
+                throw new IllegalArgumentException("Unknown mode " + indexingMode);
+        }
+    }
 }
diff --git a/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/directory/IndexRootDirectoryTest.java b/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/directory/IndexRootDirectoryTest.java
index 93f6ac1..b1cb67a 100644
--- a/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/directory/IndexRootDirectoryTest.java
+++ b/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/directory/IndexRootDirectoryTest.java
@@ -28,6 +28,7 @@
 import org.apache.jackrabbit.oak.plugins.index.IndexConstants;
 import org.apache.jackrabbit.oak.plugins.index.lucene.IndexDefinition;
 import org.apache.jackrabbit.oak.plugins.index.lucene.LuceneIndexEditorContext;
+import org.apache.jackrabbit.oak.plugins.index.lucene.hybrid.NRTIndex;
 import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
 import org.apache.jackrabbit.oak.spi.state.NodeState;
 import org.junit.Before;
@@ -195,6 +196,21 @@ public void gcIndexDirsOnStart() throws Exception{
         assertEquals(1, dir.getLocalIndexes("/a").size());
     }
 
+    @Test
+    public void gcNRTDirsOnStart() throws Exception{
+        configureUniqueId();
+
+        File fa0 = dir.getIndexDir(getDefn(), "/a", "default");
+        File nrt0 = dir.getIndexDir(getDefn(), "/a", NRTIndex.generateDirName());
+        File nrt1 = dir.getIndexDir(getDefn(), "/a", NRTIndex.generateDirName());
+
+        //Now reinitialize
+        dir = new IndexRootDirectory(temporaryFolder.getRoot());
+        assertFalse(nrt0.exists());
+        assertFalse(nrt1.exists());
+        assertTrue(fa0.exists());
+    }
+
     private NodeBuilder resetBuilder() {
         builder = EMPTY_NODE.builder();
         return builder;
diff --git a/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/hybrid/DocumentQueueTest.java b/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/hybrid/DocumentQueueTest.java
new file mode 100644
index 0000000..a800359
--- /dev/null
+++ b/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/hybrid/DocumentQueueTest.java
@@ -0,0 +1,356 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.jackrabbit.oak.plugins.index.lucene.hybrid;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.List;
+import java.util.concurrent.Executor;
+import java.util.concurrent.Executors;
+import java.util.concurrent.TimeUnit;
+
+import com.google.common.base.Stopwatch;
+import com.google.common.collect.ArrayListMultimap;
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.ImmutableSet;
+import com.google.common.collect.ListMultimap;
+import org.apache.jackrabbit.oak.api.CommitFailedException;
+import org.apache.jackrabbit.oak.core.SimpleCommitContext;
+import org.apache.jackrabbit.oak.plugins.index.IndexEditorProvider;
+import org.apache.jackrabbit.oak.plugins.index.IndexUpdateProvider;
+import org.apache.jackrabbit.oak.plugins.index.lucene.IndexCopier;
+import org.apache.jackrabbit.oak.plugins.index.lucene.IndexNode;
+import org.apache.jackrabbit.oak.plugins.index.lucene.IndexTracker;
+import org.apache.jackrabbit.oak.plugins.index.lucene.LuceneIndexConstants.IndexingMode;
+import org.apache.jackrabbit.oak.plugins.index.lucene.LuceneIndexEditorProvider;
+import org.apache.jackrabbit.oak.plugins.index.lucene.TestUtil;
+import org.apache.jackrabbit.oak.plugins.index.lucene.reader.DefaultIndexReaderFactory;
+import org.apache.jackrabbit.oak.spi.commit.CommitContext;
+import org.apache.jackrabbit.oak.spi.commit.CommitInfo;
+import org.apache.jackrabbit.oak.spi.commit.EditorHook;
+import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
+import org.apache.jackrabbit.oak.spi.state.NodeState;
+import org.apache.jackrabbit.oak.stats.Clock;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.document.StringField;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.search.TermQuery;
+import org.apache.lucene.search.TopDocs;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
+
+import static com.google.common.util.concurrent.MoreExecutors.sameThreadExecutor;
+import static org.apache.jackrabbit.oak.plugins.index.lucene.FieldFactory.newPathField;
+import static org.apache.jackrabbit.oak.plugins.index.lucene.hybrid.LocalIndexObserverTest.NOOP_EXECUTOR;
+import static org.apache.jackrabbit.oak.plugins.index.lucene.util.LuceneIndexHelper.newLucenePropertyIndexDefinition;
+import static org.apache.jackrabbit.oak.plugins.memory.EmptyNodeState.EMPTY_NODE;
+import static org.apache.jackrabbit.oak.plugins.nodetype.write.InitialContent.INITIAL_CONTENT;
+import static org.apache.jackrabbit.oak.spi.mount.Mounts.defaultMountInfoProvider;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+public class DocumentQueueTest {
+    @Rule
+    public TemporaryFolder temporaryFolder = new TemporaryFolder(new File("target"));
+
+    private NodeState root = INITIAL_CONTENT;
+    private NodeBuilder builder = root.builder();
+    private EditorHook asyncHook;
+    private EditorHook syncHook;
+    private CommitInfo info;
+
+    private IndexTracker tracker = new IndexTracker();
+    private NRTIndexFactory indexFactory;
+    private Clock clock = new Clock.Virtual();
+    private long refreshDelta = TimeUnit.SECONDS.toMillis(1);
+
+    @Before
+    public void setUp() throws IOException {
+        IndexEditorProvider editorProvider = new LuceneIndexEditorProvider(
+                null,
+                null,
+                null,
+                defaultMountInfoProvider()
+        );
+
+        syncHook = new EditorHook(new IndexUpdateProvider(editorProvider));
+        asyncHook = new EditorHook(new IndexUpdateProvider(editorProvider, "async", false));
+    }
+
+    @Test
+    public void dropDocOnLimit() throws Exception{
+        DocumentQueue queue = new DocumentQueue(2, tracker, NOOP_EXECUTOR);
+        assertTrue(queue.add(LuceneDoc.forDelete("foo", "bar")));
+        assertTrue(queue.add(LuceneDoc.forDelete("foo", "bar")));
+
+        //3rd one would be dropped as queue size is 2
+        assertFalse(queue.add(LuceneDoc.forDelete("foo", "bar")));
+    }
+
+    @Test
+    public void noIssueIfNoIndex() throws Exception{
+        DocumentQueue queue = new DocumentQueue(2, tracker, sameThreadExecutor());
+        assertTrue(queue.add(LuceneDoc.forDelete("foo", "bar")));
+        assertTrue(queue.getQueuedDocs().isEmpty());
+    }
+
+    @Test
+    public void closeQueue() throws Exception{
+        DocumentQueue queue = new DocumentQueue(2, tracker, sameThreadExecutor());
+        queue.close();
+
+        try {
+            queue.add(LuceneDoc.forDelete("foo", "bar"));
+            fail();
+        } catch(IllegalStateException ignore){
+
+        }
+    }
+
+    @Test
+    public void noIssueIfNoWriter() throws Exception{
+        NodeState indexed = createAndPopulateAsyncIndex(IndexingMode.NRT);
+        DocumentQueue queue = new DocumentQueue(2, tracker, sameThreadExecutor());
+
+        tracker.update(indexed);
+        assertTrue(queue.add(LuceneDoc.forDelete("/oak:index/fooIndex", "bar")));
+    }
+
+    @Test
+    public void updateDocument() throws Exception{
+        IndexTracker tracker = createTracker();
+        NodeState indexed = createAndPopulateAsyncIndex(IndexingMode.NRT);
+        tracker.update(indexed);
+        DocumentQueue queue = new DocumentQueue(2, tracker, sameThreadExecutor());
+
+        Document d1 = new Document();
+        d1.add(newPathField("/a/b"));
+        d1.add(new StringField("foo", "a", Field.Store.NO));
+        queue.add(LuceneDoc.forUpdate("/oak:index/fooIndex", "/a/b", d1));
+
+        List<NRTIndex> indexes = indexFactory.getIndexes("/oak:index/fooIndex");
+        NRTIndex index = indexes.get(indexes.size() - 1);
+        assertEquals(1, index.getPrimaryReader().getReader().numDocs());
+    }
+
+    @Test
+    public void indexRefresh() throws Exception{
+        tracker = createTracker();
+        NodeState indexed = createAndPopulateAsyncIndex(IndexingMode.NRT);
+        tracker.update(indexed);
+
+        clock.waitUntil(refreshDelta);
+
+        DocumentQueue queue = new DocumentQueue(2, tracker, sameThreadExecutor());
+
+        TopDocs td = doSearch("bar");
+        assertEquals(1, td.totalHits);
+
+        addDoc(queue, "/a/b", "bar");
+
+        //First update would be picked up as the base time was zero, which would
+        //now get initialized
+        td = doSearch("bar");
+        assertEquals(2, td.totalHits);
+
+        addDoc(queue, "/a/c", "bar");
+
+        //Now it would not update as the refresh interval has not been exceeded
+        td = doSearch("bar");
+        assertEquals(2, td.totalHits);
+
+        addDoc(queue, "/a/d", "bar");
+
+        //Get past the delta time
+        clock.waitUntil(clock.getTime() + refreshDelta + 1);
+
+        //Now it should show updated result
+        td = doSearch("bar");
+        assertEquals(4, td.totalHits);
+
+        //Phase 2 - Check effect of async index update cycle
+        //With that there should only be 2 copies of NRTIndex kept
+        indexed = doAsyncIndex(indexed, "a2", "bar");
+
+        tracker.update(indexed);
+
+        //Now result would be latest from async + last local
+        td = doSearch("bar");
+        assertEquals(5, td.totalHits);
+
+        //Now there would be two NRTIndex instances - previous and current
+        //so add to current and query again
+        addDoc(queue, "/a/e", "bar");
+        td = doSearch("bar");
+        assertEquals(6, td.totalHits);
+
+        //Now do another async update
+        indexed = doAsyncIndex(indexed, "a3", "bar");
+
+        tracker.update(indexed);
+
+        //Now total count would be 4
+        //3 from async and 1 from current
+        td = doSearch("bar");
+        assertEquals(4, td.totalHits);
+    }
+
+    @Test
+    public void addAllSync() throws Exception{
+        ListMultimap<String, LuceneDoc> docs = ArrayListMultimap.create();
+        tracker = createTracker();
+        NodeState indexed = createAndPopulateAsyncIndex(IndexingMode.SYNC);
+        tracker.update(indexed);
+
+        DocumentQueue queue = new DocumentQueue(2, tracker, sameThreadExecutor());
+
+        TopDocs td = doSearch("bar");
+        assertEquals(1, td.totalHits);
+
+        docs.get("/oak:index/fooIndex").add(createDoc("/a/c", "bar"));
+        queue.addAllSynchronously(docs.asMap());
+
+        td = doSearch("bar");
+        assertEquals(2, td.totalHits);
+
+        docs.clear();
+
+        docs.get("/oak:index/fooIndex").add(createDoc("/a/d", "bar"));
+        queue.addAllSynchronously(docs.asMap());
+
+        td = doSearch("bar");
+        assertEquals(3, td.totalHits);
+    }
+
+    //@Test
+    public void benchMarkIndexWriter() throws Exception{
+        Executor executor = Executors.newFixedThreadPool(5);
+        IndexCopier indexCopier = new IndexCopier(executor, temporaryFolder.getRoot());
+        indexFactory = new NRTIndexFactory(indexCopier, clock, TimeUnit.MILLISECONDS.toSeconds(refreshDelta));
+        tracker = new IndexTracker(
+                new DefaultIndexReaderFactory(defaultMountInfoProvider(), indexCopier),
+                indexFactory
+        );
+        NodeState indexed = createAndPopulateAsyncIndex(IndexingMode.NRT);
+        tracker.update(indexed);
+
+        DocumentQueue queue = new DocumentQueue(1000, tracker, executor);
+
+        /*
+            Sample output
+            [nrt] Time taken for 10000 is 639.3 ms with waits 1
+            [sync] Time taken for 10000 is 30.34 s
+
+            Refreshing reader after every commit would slow down things
+         */
+
+        LuceneDoc doc = createDoc("/a/b", "a");
+        int numDocs = 10000;
+        Stopwatch w = Stopwatch.createStarted();
+        int waitCount = 0;
+        for (int i = 0; i < numDocs; i++) {
+            while(!queue.add(doc)){
+                waitCount++;
+            }
+        }
+
+        System.out.printf("%n[nrt] Time taken for %d is %s with waits %d%n", numDocs, w, waitCount);
+
+        indexed = createAndPopulateAsyncIndex(IndexingMode.SYNC);
+        tracker.update(indexed);
+        queue = new DocumentQueue(1000, tracker, executor);
+
+        w = Stopwatch.createStarted();
+        for (int i = 0; i < numDocs; i++) {
+            ListMultimap<String, LuceneDoc> docs = ArrayListMultimap.create();
+            docs.get("/oak:index/fooIndex").add(doc);
+            queue.addAllSynchronously(docs.asMap());
+        }
+        System.out.printf("%n[sync] Time taken for %d is %s%n", numDocs, w);
+
+    }
+
+    private NodeState doAsyncIndex(NodeState current, String childName, String fooValue) throws CommitFailedException {
+        //Have some stuff to be indexed
+        NodeBuilder builder = current.builder();
+        builder.child(childName).setProperty("foo", fooValue);
+        NodeState after = builder.getNodeState();
+        return asyncHook.processCommit(current, after, newCommitInfo());
+    }
+
+    private TopDocs doSearch(String fooValue) throws IOException {
+        IndexNode indexNode = tracker.acquireIndexNode("/oak:index/fooIndex");
+        try {
+            return indexNode.getSearcher().search(new TermQuery(new Term("foo", fooValue)), 10);
+        } finally {
+            indexNode.release();
+        }
+    }
+
+    private void addDoc(DocumentQueue queue, String docPath, String fooValue) {
+        LuceneDoc doc = createDoc(docPath, fooValue);
+        queue.add(doc);
+    }
+
+    private static LuceneDoc createDoc(String docPath, String fooValue) {
+        Document d1 = new Document();
+        d1.add(newPathField(docPath));
+        d1.add(new StringField("foo", fooValue, Field.Store.NO));
+        return LuceneDoc.forUpdate("/oak:index/fooIndex", docPath, d1);
+    }
+
+    private IndexTracker createTracker() throws IOException {
+        IndexCopier indexCopier = new IndexCopier(sameThreadExecutor(), temporaryFolder.getRoot());
+        indexFactory = new NRTIndexFactory(indexCopier, clock, TimeUnit.MILLISECONDS.toSeconds(refreshDelta));
+        return new IndexTracker(
+                new DefaultIndexReaderFactory(defaultMountInfoProvider(), indexCopier),
+                indexFactory
+        );
+    }
+
+    private NodeState createAndPopulateAsyncIndex(IndexingMode indexingMode) throws CommitFailedException {
+        createIndexDefinition("fooIndex", indexingMode);
+
+        //Have some stuff to be indexed
+        builder.child("a").setProperty("foo", "bar");
+        NodeState after = builder.getNodeState();
+        return asyncHook.processCommit(EMPTY_NODE, after, newCommitInfo());
+    }
+
+    private CommitInfo newCommitInfo(){
+        info = new CommitInfo("admin", "s1",
+                ImmutableMap.<String, Object>of(CommitContext.NAME, new SimpleCommitContext()));
+        return info;
+    }
+
+    private void createIndexDefinition(String idxName, IndexingMode indexingMode) {
+        NodeBuilder idx = newLucenePropertyIndexDefinition(builder.child("oak:index"),
+                idxName, ImmutableSet.of("foo"), "async");
+        //Disable compression
+        //idx.setProperty("codec", "oakCodec");
+        TestUtil.enableIndexingMode(idx, indexingMode);
+    }
+
+}
\ No newline at end of file
diff --git a/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/hybrid/HybridIndexTest.java b/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/hybrid/HybridIndexTest.java
new file mode 100644
index 0000000..3e447cd
--- /dev/null
+++ b/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/hybrid/HybridIndexTest.java
@@ -0,0 +1,246 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.jackrabbit.oak.plugins.index.lucene.hybrid;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.Collections;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.TimeUnit;
+
+import javax.annotation.Nonnull;
+import javax.annotation.Nullable;
+
+import com.google.common.base.Predicate;
+import org.apache.jackrabbit.oak.Oak;
+import org.apache.jackrabbit.oak.api.ContentRepository;
+import org.apache.jackrabbit.oak.api.Tree;
+import org.apache.jackrabbit.oak.commons.PathUtils;
+import org.apache.jackrabbit.oak.plugins.index.AsyncIndexUpdate;
+import org.apache.jackrabbit.oak.plugins.index.counter.NodeCounterEditorProvider;
+import org.apache.jackrabbit.oak.plugins.index.lucene.IndexCopier;
+import org.apache.jackrabbit.oak.plugins.index.lucene.IndexTracker;
+import org.apache.jackrabbit.oak.plugins.index.lucene.LuceneIndexConstants.IndexingMode;
+import org.apache.jackrabbit.oak.plugins.index.lucene.LuceneIndexEditorProvider;
+import org.apache.jackrabbit.oak.plugins.index.lucene.LuceneIndexProvider;
+import org.apache.jackrabbit.oak.plugins.index.lucene.TestUtil;
+import org.apache.jackrabbit.oak.plugins.index.lucene.reader.DefaultIndexReaderFactory;
+import org.apache.jackrabbit.oak.plugins.index.lucene.reader.LuceneIndexReaderFactory;
+import org.apache.jackrabbit.oak.plugins.index.nodetype.NodeTypeIndexProvider;
+import org.apache.jackrabbit.oak.plugins.index.property.PropertyIndexEditorProvider;
+import org.apache.jackrabbit.oak.plugins.memory.ArrayBasedBlob;
+import org.apache.jackrabbit.oak.plugins.memory.MemoryNodeStore;
+import org.apache.jackrabbit.oak.plugins.nodetype.write.InitialContent;
+import org.apache.jackrabbit.oak.query.AbstractQueryTest;
+import org.apache.jackrabbit.oak.spi.commit.Observer;
+import org.apache.jackrabbit.oak.spi.mount.MountInfoProvider;
+import org.apache.jackrabbit.oak.spi.query.QueryIndexProvider;
+import org.apache.jackrabbit.oak.spi.security.OpenSecurityProvider;
+import org.apache.jackrabbit.oak.spi.state.NodeStore;
+import org.apache.jackrabbit.oak.spi.whiteboard.Whiteboard;
+import org.apache.jackrabbit.oak.spi.whiteboard.WhiteboardUtils;
+import org.apache.jackrabbit.oak.stats.Clock;
+import org.apache.jackrabbit.oak.stats.StatisticsProvider;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
+
+import static com.google.common.collect.ImmutableList.of;
+import static com.google.common.util.concurrent.MoreExecutors.sameThreadExecutor;
+import static org.apache.jackrabbit.oak.plugins.index.lucene.LucenePropertyIndexTest.createIndex;
+import static org.apache.jackrabbit.oak.spi.mount.Mounts.defaultMountInfoProvider;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+
+public class HybridIndexTest extends AbstractQueryTest {
+    private ExecutorService executorService = Executors.newFixedThreadPool(2);
+
+    @Rule
+    public TemporaryFolder temporaryFolder = new TemporaryFolder(new File("target"));
+    private NodeStore nodeStore;
+    private DocumentQueue queue;
+    private Clock clock = new Clock.Virtual();
+    private Whiteboard wb;
+
+    private long refreshDelta = TimeUnit.SECONDS.toMillis(1);
+
+    @Override
+    protected ContentRepository createRepository() {
+        IndexCopier copier;
+        try {
+            copier = new IndexCopier(executorService, temporaryFolder.getRoot());
+        } catch (IOException e) {
+            throw new RuntimeException(e);
+        }
+        MountInfoProvider mip = defaultMountInfoProvider();
+
+        NRTIndexFactory nrtIndexFactory = new NRTIndexFactory(copier, clock, TimeUnit.MILLISECONDS.toSeconds(refreshDelta));
+        LuceneIndexReaderFactory indexReaderFactory = new DefaultIndexReaderFactory(mip, copier);
+        IndexTracker tracker = new IndexTracker(indexReaderFactory,nrtIndexFactory);
+        LuceneIndexProvider provider = new LuceneIndexProvider(tracker);
+
+        LuceneIndexEditorProvider editorProvider = new LuceneIndexEditorProvider(copier,
+                tracker,
+                null,
+                null,
+                mip);
+
+        queue = new DocumentQueue(100, tracker, sameThreadExecutor());
+        LocalIndexObserver localIndexObserver = new LocalIndexObserver(queue, StatisticsProvider.NOOP);
+
+        nodeStore = new MemoryNodeStore();
+        Oak oak = new Oak(nodeStore)
+                .with(new InitialContent())
+                .with(new OpenSecurityProvider())
+                .with((QueryIndexProvider) provider)
+                .with((Observer) provider)
+                .with(localIndexObserver)
+                .with(editorProvider)
+                .with(new PropertyIndexEditorProvider())
+                .with(new NodeTypeIndexProvider())
+                .with(new NodeCounterEditorProvider())
+                //Effectively disable async indexing auto run
+                //such that we can control run timing as per test requirement
+                .withAsyncIndexing("async", TimeUnit.DAYS.toSeconds(1));
+
+        wb = oak.getWhiteboard();
+        return oak.createContentRepository();
+    }
+
+    @Test
+    public void hybridIndex() throws Exception{
+        String idxName = "hybridtest";
+        Tree idx = createIndex(root.getTree("/"), idxName, Collections.singleton("foo"));
+        TestUtil.enableIndexingMode(idx, IndexingMode.NRT);
+        root.commit();
+
+        //Get initial indexing done as local indexing only works
+        //for incremental indexing
+        createPath("/a").setProperty("foo", "bar");
+        root.commit();
+
+        runAsyncIndex();
+
+        setTraversalEnabled(false);
+        assertQuery("select [jcr:path] from [nt:base] where [foo] = 'bar'", of("/a"));
+
+        //Add new node. This would not be reflected in result as local index would not be updated
+        createPath("/b").setProperty("foo", "bar");
+        root.commit();
+        assertQuery("select [jcr:path] from [nt:base] where [foo] = 'bar'", of("/a"));
+
+        //Now let some time elapse such that readers can be refreshed
+        clock.waitUntil(clock.getTime() + refreshDelta + 1);
+
+        //Now recently added stuff should be visible without async indexing run
+        assertQuery("select [jcr:path] from [nt:base] where [foo] = 'bar'", of("/a", "/b"));
+
+        createPath("/c").setProperty("foo", "bar");
+        root.commit();
+
+        //Post async index it should still be up to date
+        runAsyncIndex();
+        assertQuery("select [jcr:path] from [nt:base] where [foo] = 'bar'", of("/a", "/b", "/c"));
+    }
+
+    @Test
+    public void noTextExtractionForSyncCommit() throws Exception{
+        String idxName = "hybridtest";
+        Tree idx = TestUtil.createFulltextIndex(root.getTree("/"), idxName);
+        TestUtil.enableIndexingMode(idx, IndexingMode.NRT);
+        root.commit();
+
+        runAsyncIndex();
+
+        AccessRecordingBlob testBlob =
+                new AccessRecordingBlob("<?xml version=\"1.0\" encoding=\"UTF-8\"?><msg>sky is blue</msg>".getBytes());
+
+        Tree test = root.getTree("/").addChild("test");
+        TestUtil.createFileNode(test, "msg", testBlob, "application/xml");
+        root.commit();
+
+        assertEquals(0, testBlob.accessCount);
+        assertQuery("select * from [nt:base] where CONTAINS(*, 'sky')", Collections.<String>emptyList());
+
+        runAsyncIndex();
+        assertEquals(1, testBlob.accessCount);
+        assertQuery("select * from [nt:base] where CONTAINS(*, 'sky')", of("/test/msg/jcr:content"));
+
+    }
+
+    @Test
+    public void hybridIndexSync() throws Exception{
+        String idxName = "hybridtest";
+        Tree idx = createIndex(root.getTree("/"), idxName, Collections.singleton("foo"));
+        TestUtil.enableIndexingMode(idx, IndexingMode.SYNC);
+        root.commit();
+
+        //Get initial indexing done as local indexing only works
+        //for incremental indexing
+        createPath("/a").setProperty("foo", "bar");
+        root.commit();
+
+        runAsyncIndex();
+
+        setTraversalEnabled(false);
+        assertQuery("select [jcr:path] from [nt:base] where [foo] = 'bar'", of("/a"));
+
+        //Add new node. This should get immediately reflected as it's a sync index
+        createPath("/b").setProperty("foo", "bar");
+        root.commit();
+        assertQuery("select [jcr:path] from [nt:base] where [foo] = 'bar'", of("/a", "/b"));
+    }
+
+    private void runAsyncIndex() {
+        Runnable async = WhiteboardUtils.getService(wb, Runnable.class, new Predicate<Runnable>() {
+            @Override
+            public boolean apply(@Nullable Runnable input) {
+                return input instanceof AsyncIndexUpdate;
+            }
+        });
+        assertNotNull(async);
+        async.run();
+        root.refresh();
+    }
+
+    private Tree createPath(String path){
+        Tree base = root.getTree("/");
+        for (String e : PathUtils.elements(path)){
+            base = base.addChild(e);
+        }
+        return base;
+    }
+
+    private static class AccessRecordingBlob extends ArrayBasedBlob {
+        int accessCount = 0;
+        public AccessRecordingBlob(byte[] value) {
+            super(value);
+        }
+
+        @Nonnull
+        @Override
+        public InputStream getNewStream() {
+            accessCount++;
+            return super.getNewStream();
+        }
+    }
+}
diff --git a/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/hybrid/LocalIndexObserverTest.java b/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/hybrid/LocalIndexObserverTest.java
new file mode 100644
index 0000000..1ef338f
--- /dev/null
+++ b/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/hybrid/LocalIndexObserverTest.java
@@ -0,0 +1,92 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.jackrabbit.oak.plugins.index.lucene.hybrid;
+
+import java.util.concurrent.Executor;
+
+import com.google.common.collect.ImmutableMap;
+import org.apache.jackrabbit.oak.core.SimpleCommitContext;
+import org.apache.jackrabbit.oak.plugins.index.lucene.IndexTracker;
+import org.apache.jackrabbit.oak.spi.commit.CommitContext;
+import org.apache.jackrabbit.oak.spi.commit.CommitInfo;
+import org.apache.jackrabbit.oak.stats.StatisticsProvider;
+import org.junit.Before;
+import org.junit.Test;
+
+import static org.apache.jackrabbit.oak.plugins.memory.EmptyNodeState.EMPTY_NODE;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
+
+public class LocalIndexObserverTest {
+    static final Executor NOOP_EXECUTOR = new Executor() {
+        @Override
+        public void execute(Runnable command) {
+
+        }
+    };
+
+    private IndexTracker tracker = new IndexTracker();
+    private DocumentQueue collectingQueue;
+    private LocalIndexObserver observer;
+
+    @Before
+    public void setUp(){
+        collectingQueue = new DocumentQueue(10, tracker, NOOP_EXECUTOR);
+        observer = new LocalIndexObserver(collectingQueue, StatisticsProvider.NOOP);
+    }
+
+    @Test
+    public void nullCommitInfo() throws Exception{
+        observer.contentChanged(EMPTY_NODE, null);
+    }
+
+    @Test
+    public void noCommitContext() throws Exception{
+        observer.contentChanged(EMPTY_NODE, CommitInfo.EMPTY);
+    }
+
+    @Test
+    public void noDocHolder() throws Exception{
+        observer.contentChanged(EMPTY_NODE, newCommitInfo());
+    }
+
+    @Test
+    public void docsAddedToQueue() throws Exception{
+        CommitInfo info = newCommitInfo();
+        CommitContext cc = (CommitContext) info.getInfo().get(CommitContext.NAME);
+
+        LuceneDocumentHolder holder = new LuceneDocumentHolder();
+        holder.getNRTIndexedDocList("foo").add(LuceneDoc.forDelete("foo", "bar"));
+
+        cc.set(LuceneDocumentHolder.NAME, holder);
+
+        observer.contentChanged(EMPTY_NODE, info);
+
+        assertEquals(1, collectingQueue.getQueuedDocs().size());
+        assertNull(cc.get(LuceneDocumentHolder.NAME));
+    }
+
+    private CommitInfo newCommitInfo(){
+        return new CommitInfo("admin", "s1",
+                ImmutableMap.<String, Object>of(CommitContext.NAME, new SimpleCommitContext()));
+    }
+
+
+}
\ No newline at end of file
diff --git a/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/hybrid/LocalIndexWriterFactoryTest.java b/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/hybrid/LocalIndexWriterFactoryTest.java
new file mode 100644
index 0000000..ae0e329
--- /dev/null
+++ b/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/hybrid/LocalIndexWriterFactoryTest.java
@@ -0,0 +1,213 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.jackrabbit.oak.plugins.index.lucene.hybrid;
+
+import java.io.IOException;
+
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.ImmutableSet;
+import org.apache.jackrabbit.oak.api.CommitFailedException;
+import org.apache.jackrabbit.oak.core.SimpleCommitContext;
+import org.apache.jackrabbit.oak.plugins.index.IndexEditorProvider;
+import org.apache.jackrabbit.oak.plugins.index.IndexUpdateProvider;
+import org.apache.jackrabbit.oak.plugins.index.lucene.LuceneIndexConstants.IndexingMode;
+import org.apache.jackrabbit.oak.plugins.index.lucene.LuceneIndexEditorProvider;
+import org.apache.jackrabbit.oak.plugins.index.lucene.TestUtil;
+import org.apache.jackrabbit.oak.spi.commit.CommitContext;
+import org.apache.jackrabbit.oak.spi.commit.CommitInfo;
+import org.apache.jackrabbit.oak.spi.commit.EditorHook;
+import org.apache.jackrabbit.oak.spi.mount.Mounts;
+import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
+import org.apache.jackrabbit.oak.spi.state.NodeState;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import static org.apache.jackrabbit.oak.plugins.index.lucene.util.LuceneIndexHelper.newLucenePropertyIndexDefinition;
+import static org.apache.jackrabbit.oak.plugins.memory.EmptyNodeState.EMPTY_NODE;
+import static org.apache.jackrabbit.oak.plugins.nodetype.write.InitialContent.INITIAL_CONTENT;
+import static org.junit.Assert.*;
+
+public class LocalIndexWriterFactoryTest {
+    private NodeState root = INITIAL_CONTENT;
+    private NodeBuilder builder = root.builder();
+
+    private EditorHook syncHook;
+    private EditorHook asyncHook;
+    private CommitInfo info;
+    private LuceneIndexEditorProvider editorProvider;
+
+    @Before
+    public void setUp() throws IOException {
+        editorProvider = new LuceneIndexEditorProvider(
+                null,
+                null,
+                null,
+                Mounts.defaultMountInfoProvider()
+        );
+
+        syncHook = new EditorHook(new IndexUpdateProvider(editorProvider));
+        asyncHook = new EditorHook(new IndexUpdateProvider(editorProvider, "async", false));
+    }
+
+    @After
+    public void cleanup() throws IOException {
+    }
+
+    @Test
+    public void ignoreReindexCase() throws Exception{
+        createIndexDefinition("fooIndex", IndexingMode.NRT);
+
+        builder.child("a").setProperty("foo", "bar");
+        NodeState after = builder.getNodeState();
+        syncHook.processCommit(EMPTY_NODE, after, newCommitInfo());
+
+        //This is the reindex case so nothing would be indexed locally
+        //and hence no holder should be present in the commit context
+        assertNull(getHolder());
+        assertNull(getCommitAttribute(LocalIndexWriterFactory.COMMIT_PROCESSED_BY_LOCAL_LUCENE_EDITOR));
+    }
+
+    @Test
+    public void holderNotInitializedUnlessIndexed() throws Exception{
+        NodeState indexed = createAndPopulateAsyncIndex(IndexingMode.NRT);
+        builder = indexed.builder();
+        builder.child("b");
+        NodeState after = builder.getNodeState();
+        syncHook.processCommit(indexed, after, newCommitInfo());
+
+        //This is incremental index case but no entry for fooIndex
+        //so holder should be null
+        assertNull(getHolder());
+        assertNotNull(getCommitAttribute(LocalIndexWriterFactory.COMMIT_PROCESSED_BY_LOCAL_LUCENE_EDITOR));
+    }
+
+    @Test
+    public void localIndexWriter() throws Exception{
+        NodeState indexed = createAndPopulateAsyncIndex(IndexingMode.NRT);
+        builder = indexed.builder();
+        builder.child("b").setProperty("foo", "bar");
+        builder.child("c").setProperty("foo", "bar");
+        builder.child("a").remove();
+        NodeState after = builder.getNodeState();
+        syncHook.processCommit(indexed, after, newCommitInfo());
+
+        LuceneDocumentHolder holder = getHolder();
+        assertNotNull(holder);
+
+        //2 adds, none for the delete
+        assertEquals(2, holder.getNRTIndexedDocList("/oak:index/fooIndex").size());
+    }
+
+    @Test
+    public void mutlipleIndex() throws Exception{
+        NodeState indexed = createAndPopulateTwoAsyncIndex(IndexingMode.NRT);
+        builder = indexed.builder();
+        builder.child("b").setProperty("foo", "bar");
+        builder.child("c").setProperty("bar", "foo");
+        builder.child("a").remove();
+        NodeState after = builder.getNodeState();
+        syncHook.processCommit(indexed, after, newCommitInfo());
+
+        LuceneDocumentHolder holder = getHolder();
+        assertNotNull(holder);
+
+        //1 add  - bar
+        assertEquals(1, holder.getNRTIndexedDocList("/oak:index/fooIndex").size());
+
+        //1 add  - bar
+        assertEquals(1, holder.getNRTIndexedDocList("/oak:index/barIndex").size());
+
+    }
+
+    @Test
+    public void syncIndexing() throws Exception{
+        NodeState indexed = createAndPopulateAsyncIndex(IndexingMode.SYNC);
+        builder = indexed.builder();
+        builder.child("b").setProperty("foo", "bar");
+        builder.child("c").setProperty("foo", "bar");
+        NodeState after = builder.getNodeState();
+        syncHook.processCommit(indexed, after, newCommitInfo());
+
+        LuceneDocumentHolder holder = getHolder();
+        assertNotNull(holder);
+
+        //2 adds, none for the delete
+        assertEquals(2, holder.getSyncIndexedDocList("/oak:index/fooIndex").size());
+        assertEquals(0, holder.getNRTIndexedDocList("/oak:index/fooIndex").size());
+    }
+
+    @Test
+    public void inMemoryDocLimit() throws Exception{
+        NodeState indexed = createAndPopulateAsyncIndex(IndexingMode.NRT);
+        editorProvider.setInMemoryDocsLimit(5);
+        builder = indexed.builder();
+        for (int i = 0; i < 10; i++) {
+            builder.child("b" + i).setProperty("foo", "bar");
+        }
+        NodeState after = builder.getNodeState();
+        syncHook.processCommit(indexed, after, newCommitInfo());
+
+        LuceneDocumentHolder holder = getHolder();
+        assertEquals(5, holder.getNRTIndexedDocList("/oak:index/fooIndex").size());
+    }
+
+    private NodeState createAndPopulateAsyncIndex(IndexingMode indexingMode) throws CommitFailedException {
+        createIndexDefinition("fooIndex", indexingMode);
+
+        //Have some stuff to be indexed
+        builder.child("a").setProperty("foo", "bar");
+        NodeState after = builder.getNodeState();
+        return asyncHook.processCommit(EMPTY_NODE, after, newCommitInfo());
+    }
+
+    private NodeState createAndPopulateTwoAsyncIndex(IndexingMode indexingMode) throws CommitFailedException {
+        createIndexDefinition("fooIndex", indexingMode);
+        createIndexDefinition("barIndex", indexingMode);
+
+        //Have some stuff to be indexed
+        builder.child("a").setProperty("foo", "bar");
+        builder.child("a").setProperty("bar", "foo");
+        NodeState after = builder.getNodeState();
+        return asyncHook.processCommit(EMPTY_NODE, after, newCommitInfo());
+    }
+
+    private LuceneDocumentHolder getHolder(){
+        return (LuceneDocumentHolder) getCommitAttribute(LuceneDocumentHolder.NAME);
+    }
+
+    private Object getCommitAttribute(String name){
+        CommitContext cc = (CommitContext) info.getInfo().get(CommitContext.NAME);
+        return cc.get(name);
+    }
+
+    private CommitInfo newCommitInfo(){
+        info = new CommitInfo("admin", "s1",
+                ImmutableMap.<String, Object>of(CommitContext.NAME, new SimpleCommitContext()));
+        return info;
+    }
+
+    private void createIndexDefinition(String idxName, IndexingMode indexingMode) {
+        NodeBuilder idx = newLucenePropertyIndexDefinition(builder.child("oak:index"),
+                idxName, ImmutableSet.of("foo"), "async");
+        TestUtil.enableIndexingMode(idx, indexingMode);
+    }
+
+}
\ No newline at end of file
diff --git a/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/hybrid/NRTIndexFactoryTest.java b/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/hybrid/NRTIndexFactoryTest.java
new file mode 100644
index 0000000..981c913
--- /dev/null
+++ b/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/hybrid/NRTIndexFactoryTest.java
@@ -0,0 +1,128 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.jackrabbit.oak.plugins.index.lucene.hybrid;
+
+import java.io.File;
+import java.io.IOException;
+
+import org.apache.jackrabbit.oak.plugins.index.IndexConstants;
+import org.apache.jackrabbit.oak.plugins.index.lucene.IndexCopier;
+import org.apache.jackrabbit.oak.plugins.index.lucene.IndexDefinition;
+import org.apache.jackrabbit.oak.plugins.index.lucene.LuceneIndexConstants.IndexingMode;
+import org.apache.jackrabbit.oak.plugins.index.lucene.TestUtil;
+import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
+import org.apache.jackrabbit.oak.spi.state.NodeState;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
+
+import static com.google.common.util.concurrent.MoreExecutors.sameThreadExecutor;
+import static org.apache.jackrabbit.oak.plugins.nodetype.write.InitialContent.INITIAL_CONTENT;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+
+public class NRTIndexFactoryTest {
+    @Rule
+    public TemporaryFolder temporaryFolder = new TemporaryFolder(new File("target"));
+
+    private NodeState root = INITIAL_CONTENT;
+    private NodeBuilder builder = root.builder();
+
+    private IndexCopier indexCopier;
+    private NRTIndexFactory indexFactory;
+
+    @Before
+    public void setUp() throws IOException {
+        indexCopier = new IndexCopier(sameThreadExecutor(), temporaryFolder.getRoot());
+        indexFactory = new NRTIndexFactory(indexCopier);
+    }
+
+    @Test
+    public void noIndexForAsync() throws Exception{
+        IndexDefinition idxDefn = new IndexDefinition(root, builder.getNodeState());
+        assertNull(indexFactory.createIndex(idxDefn));
+    }
+
+    @Test
+    public void indexCreationNRT() throws Exception{
+        IndexDefinition idxDefn = getIndexDefinition("/foo", IndexingMode.SYNC);
+
+        NRTIndex idx1 = indexFactory.createIndex(idxDefn);
+        assertNotNull(idx1);
+        assertEquals(1, indexFactory.getIndexes("/foo").size());
+    }
+
+    @Test
+    public void indexCreationSync() throws Exception{
+        IndexDefinition idxDefn = getNRTIndexDefinition("/foo");
+
+        NRTIndex idx1 = indexFactory.createIndex(idxDefn);
+        assertNotNull(idx1);
+        assertEquals(1, indexFactory.getIndexes("/foo").size());
+    }
+
+    @Test
+    public void indexCreationAndCloser() throws Exception{
+        IndexDefinition idxDefn = getNRTIndexDefinition("/foo");
+
+        NRTIndex idx1 = indexFactory.createIndex(idxDefn);
+        assertNotNull(idx1);
+        assertEquals(1, indexFactory.getIndexes("/foo").size());
+
+        NRTIndex idx2 = indexFactory.createIndex(idxDefn);
+        assertEquals(2, indexFactory.getIndexes("/foo").size());
+        assertFalse(idx1.isClosed());
+
+        NRTIndex idx3 = indexFactory.createIndex(idxDefn);
+        assertEquals(2, indexFactory.getIndexes("/foo").size());
+
+        //With 2 generations open the first one should be closed
+        assertTrue(idx1.isClosed());
+    }
+
+    @Test
+    public void closeIndexOnClose() throws Exception{
+        IndexDefinition idxDefn = getNRTIndexDefinition("/foo");
+
+        NRTIndex idx1 = indexFactory.createIndex(idxDefn);
+        NRTIndex idx2 = indexFactory.createIndex(idxDefn);
+        assertEquals(2, indexFactory.getIndexes("/foo").size());
+
+        indexFactory.close();
+        assertEquals(0, indexFactory.getIndexes("/foo").size());
+        assertTrue(idx1.isClosed());
+        assertTrue(idx2.isClosed());
+    }
+
+    private IndexDefinition getNRTIndexDefinition(String indexPath) {
+       return getIndexDefinition(indexPath, IndexingMode.NRT);
+    }
+
+    private IndexDefinition getIndexDefinition(String indexPath, IndexingMode indexingMode) {
+        builder.setProperty(IndexConstants.INDEX_PATH, indexPath);
+        TestUtil.enableIndexingMode(builder, indexingMode);
+
+        return new IndexDefinition(root, builder.getNodeState());
+    }
+}
\ No newline at end of file
diff --git a/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/hybrid/NRTIndexTest.java b/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/hybrid/NRTIndexTest.java
new file mode 100644
index 0000000..643c530
--- /dev/null
+++ b/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/hybrid/NRTIndexTest.java
@@ -0,0 +1,203 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.jackrabbit.oak.plugins.index.lucene.hybrid;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.List;
+
+import org.apache.jackrabbit.oak.plugins.index.IndexConstants;
+import org.apache.jackrabbit.oak.plugins.index.lucene.IndexCopier;
+import org.apache.jackrabbit.oak.plugins.index.lucene.IndexDefinition;
+import org.apache.jackrabbit.oak.plugins.index.lucene.LuceneIndexConstants.IndexingMode;
+import org.apache.jackrabbit.oak.plugins.index.lucene.LuceneIndexEditorContext;
+import org.apache.jackrabbit.oak.plugins.index.lucene.TestUtil;
+import org.apache.jackrabbit.oak.plugins.index.lucene.reader.LuceneIndexReader;
+import org.apache.jackrabbit.oak.plugins.index.lucene.writer.LuceneIndexWriter;
+import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
+import org.apache.jackrabbit.oak.spi.state.NodeState;
+import org.apache.lucene.document.Document;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
+
+import static com.google.common.util.concurrent.MoreExecutors.sameThreadExecutor;
+import static org.apache.jackrabbit.oak.plugins.index.lucene.FieldFactory.newPathField;
+import static org.apache.jackrabbit.oak.plugins.nodetype.write.InitialContent.INITIAL_CONTENT;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNotSame;
+import static org.junit.Assert.assertSame;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+public class NRTIndexTest {
+    @Rule
+    public TemporaryFolder temporaryFolder = new TemporaryFolder(new File("target"));
+
+    private NodeState root = INITIAL_CONTENT;
+    private NodeBuilder builder = root.builder();
+
+    private IndexCopier indexCopier;
+    private NRTIndexFactory indexFactory;
+
+    @Before
+    public void setUp() throws IOException {
+        indexCopier = new IndexCopier(sameThreadExecutor(), temporaryFolder.getRoot());
+        indexFactory = new NRTIndexFactory(indexCopier);
+        LuceneIndexEditorContext.configureUniqueId(builder);
+    }
+
+    @After
+    public void cleanup() throws IOException {
+        indexFactory.close();
+        indexCopier.close();
+    }
+
+    @Test
+    public void getReaderWithoutWriter() throws Exception{
+        IndexDefinition idxDefn = getSyncIndexDefinition("/foo");
+
+        NRTIndex idx1 = indexFactory.createIndex(idxDefn);
+        List<LuceneIndexReader> readers = idx1.getReaders();
+        assertNotNull(readers);
+        assertTrue(readers.isEmpty());
+
+        idx1.close();
+        assertTrue(idx1.isClosed());
+
+        //Closing multiple times should not raise an exception
+        idx1.close();
+    }
+
+    @Test
+    public void writerCreation() throws Exception{
+        IndexDefinition idxDefn = getSyncIndexDefinition("/foo");
+        NRTIndex idx = indexFactory.createIndex(idxDefn);
+        LuceneIndexWriter writer = idx.getWriter();
+
+        assertNotNull(writer);
+        assertNotNull(idx.getIndexDir());
+        List<LuceneIndexReader> readers = idx.getReaders();
+        assertEquals(1, readers.size());
+
+        LuceneIndexWriter writer2 = idx.getWriter();
+        assertSame(writer, writer2);
+    }
+
+    @Test
+    public void dirDeletedUponClose() throws Exception{
+        IndexDefinition idxDefn = getSyncIndexDefinition("/foo");
+        NRTIndex idx = indexFactory.createIndex(idxDefn);
+        LuceneIndexWriter writer = idx.getWriter();
+        File indexDir = idx.getIndexDir();
+
+        assertTrue(indexDir.exists());
+
+        idx.close();
+        assertFalse(indexDir.exists());
+
+        try{
+            idx.getReaders();
+            fail();
+        } catch (IllegalStateException ignore){
+
+        }
+
+        try{
+            idx.getWriter();
+            fail();
+        } catch (IllegalStateException ignore){
+
+        }
+    }
+
+    @Test
+    public void multipleUpdateForSamePath() throws Exception{
+        IndexDefinition idxDefn = getSyncIndexDefinition("/foo");
+        NRTIndex idx = indexFactory.createIndex(idxDefn);
+        LuceneIndexWriter writer = idx.getWriter();
+
+        Document document = new Document();
+        document.add(newPathField("/a/b"));
+
+        writer.updateDocument("/a/b", document);
+        assertEquals(1, idx.getPrimaryReader().getReader().numDocs());
+
+        writer.updateDocument("/a/b", document);
+
+        //Update for same path should not lead to deletion
+        assertEquals(2, idx.getPrimaryReader().getReader().numDocs());
+        assertEquals(0, idx.getPrimaryReader().getReader().numDeletedDocs());
+    }
+
+    @Test
+    public void previousIndexInitialized() throws Exception{
+        IndexDefinition idxDefn = getSyncIndexDefinition("/foo");
+        NRTIndex idx1 = indexFactory.createIndex(idxDefn);
+        LuceneIndexWriter w1 = idx1.getWriter();
+
+        Document d1 = new Document();
+        d1.add(newPathField("/a/b"));
+        w1.updateDocument("/a/b", d1);
+
+        NRTIndex idx2 = indexFactory.createIndex(idxDefn);
+        assertEquals(1, idx2.getReaders().size());
+
+        LuceneIndexWriter w2 = idx2.getWriter();
+        assertEquals(2, idx2.getReaders().size());
+
+        assertNotEquals(idx1.getIndexDir(), idx2.getIndexDir());
+    }
+
+    @Test
+    public void sameReaderIfNoChange() throws Exception{
+        IndexDefinition idxDefn = getSyncIndexDefinition("/foo");
+        NRTIndex idx1 = indexFactory.createIndex(idxDefn);
+        LuceneIndexWriter w1 = idx1.getWriter();
+
+        Document d1 = new Document();
+        d1.add(newPathField("/a/b"));
+        w1.updateDocument("/a/b", d1);
+
+        List<LuceneIndexReader> readers = idx1.getReaders();
+        List<LuceneIndexReader> readers2 = idx1.getReaders();
+
+        assertSame(readers, readers2);
+
+        w1.updateDocument("/a/b", d1);
+        List<LuceneIndexReader> readers3 = idx1.getReaders();
+        assertNotSame(readers2, readers3);
+    }
+
+    private IndexDefinition getSyncIndexDefinition(String indexPath) {
+        builder.setProperty(IndexConstants.INDEX_PATH, indexPath);
+        TestUtil.enableIndexingMode(builder, IndexingMode.NRT);
+
+        return new IndexDefinition(root, builder.getNodeState());
+    }
+
+
+
+}
\ No newline at end of file
diff --git a/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/hybrid/RecordingRunnable.java b/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/hybrid/RecordingRunnable.java
new file mode 100644
index 0000000..e8eb04a
--- /dev/null
+++ b/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/hybrid/RecordingRunnable.java
@@ -0,0 +1,45 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.jackrabbit.oak.plugins.index.lucene.hybrid;
+
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+class RecordingRunnable implements Runnable {
+    private boolean invoked;
+    @Override
+    public void run() {
+        invoked = true;
+    }
+
+    public void assertInvokedAndReset(){
+        assertTrue(invoked);
+        reset();
+    }
+
+    public void assertNotInvokedAndReset(){
+        assertFalse(invoked);
+        reset();
+    }
+
+    public void reset(){
+        invoked = false;
+    }
+}
diff --git a/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/hybrid/RefreshOnReadPolicyTest.java b/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/hybrid/RefreshOnReadPolicyTest.java
new file mode 100644
index 0000000..ba214e4
--- /dev/null
+++ b/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/hybrid/RefreshOnReadPolicyTest.java
@@ -0,0 +1,83 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.jackrabbit.oak.plugins.index.lucene.hybrid;
+
+import java.util.concurrent.TimeUnit;
+
+import org.apache.jackrabbit.oak.stats.Clock;
+import org.junit.Test;
+
+public class RefreshOnReadPolicyTest {
+    private Clock clock = new Clock.Virtual();
+    private RecordingRunnable refreshCallback = new RecordingRunnable();
+    private RefreshOnReadPolicy policy = new RefreshOnReadPolicy(clock, TimeUnit.SECONDS, 1);
+    private long refreshDelta = TimeUnit.SECONDS.toMillis(1) + 1;
+
+    @Test
+    public void noRefreshOnReadIfNotUpdated() throws Exception{
+        policy.refreshOnReadIfRequired(refreshCallback);
+        refreshCallback.assertNotInvokedAndReset();
+    }
+
+    @Test
+    public void refreshOnFirstWrite() throws Exception{
+        clock.waitUntil(System.currentTimeMillis());
+
+        policy.refreshOnWriteIfRequired(refreshCallback);
+        refreshCallback.assertInvokedAndReset();
+    }
+
+    @Test
+    public void refreshOnReadAfterWrite() throws Exception{
+        clock.waitUntil(System.currentTimeMillis());
+
+        policy.refreshOnWriteIfRequired(refreshCallback);
+        refreshCallback.reset();
+        //Call again without change in time
+        policy.refreshOnWriteIfRequired(refreshCallback);
+
+        //This time callback should not be invoked
+        refreshCallback.assertNotInvokedAndReset();
+
+        policy.refreshOnReadIfRequired(refreshCallback);
+        //On read the callback should be invoked
+        refreshCallback.assertInvokedAndReset();
+    }
+
+    @Test
+    public void refreshOnWriteWithTimeElapsed() throws Exception{
+        clock.waitUntil(System.currentTimeMillis());
+
+        policy.refreshOnWriteIfRequired(refreshCallback);
+        refreshCallback.reset();
+
+        //Call again without change in time
+        policy.refreshOnWriteIfRequired(refreshCallback);
+
+        //This time callback should not be invoked
+        refreshCallback.assertNotInvokedAndReset();
+
+        clock.waitUntil(clock.getTime() + refreshDelta);
+
+        policy.refreshOnWriteIfRequired(refreshCallback);
+        refreshCallback.assertInvokedAndReset();
+    }
+
+}
\ No newline at end of file
diff --git a/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/hybrid/RefreshOnWritePolicyTest.java b/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/hybrid/RefreshOnWritePolicyTest.java
new file mode 100644
index 0000000..09dbc83
--- /dev/null
+++ b/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/hybrid/RefreshOnWritePolicyTest.java
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.jackrabbit.oak.plugins.index.lucene.hybrid;
+
+import org.junit.Test;
+
+public class RefreshOnWritePolicyTest {
+    private RecordingRunnable refreshCallback = new RecordingRunnable();
+
+    @Test
+    public void noRefreshOnRead() throws Exception{
+        RefreshOnWritePolicy policy = new RefreshOnWritePolicy();
+        policy.refreshOnReadIfRequired(refreshCallback);
+        refreshCallback.assertNotInvokedAndReset();
+
+        //Even after update it should not be refreshed
+        policy.updated();
+        policy.refreshOnReadIfRequired(refreshCallback);
+        refreshCallback.assertNotInvokedAndReset();
+    }
+
+    @Test
+    public void refreshOnWrite() throws Exception{
+        RefreshOnWritePolicy policy = new RefreshOnWritePolicy();
+
+        policy.updated();
+        policy.refreshOnWriteIfRequired(refreshCallback);
+        refreshCallback.assertInvokedAndReset();
+    }
+
+}
\ No newline at end of file
diff --git a/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/hybrid/TimedRefreshPolicyTest.java b/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/hybrid/TimedRefreshPolicyTest.java
new file mode 100644
index 0000000..f2da012
--- /dev/null
+++ b/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/hybrid/TimedRefreshPolicyTest.java
@@ -0,0 +1,106 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.jackrabbit.oak.plugins.index.lucene.hybrid;
+
+import java.util.concurrent.TimeUnit;
+
+import org.apache.jackrabbit.oak.stats.Clock;
+import org.junit.Test;
+
+public class TimedRefreshPolicyTest {
+    private Clock clock = new Clock.Virtual();
+    private RecordingRunnable refreshCallback = new RecordingRunnable();
+
+    @Test
+    public void dirtyAndFirstCheck() throws Exception{
+        clock.waitUntil(System.currentTimeMillis());
+        TimedRefreshPolicy policy = new TimedRefreshPolicy(clock, TimeUnit.SECONDS, 1);
+        policy.refreshOnWriteIfRequired(refreshCallback);
+        refreshCallback.assertNotInvokedAndReset();
+
+        policy.updated();
+
+        policy.refreshOnWriteIfRequired(refreshCallback);
+        refreshCallback.assertInvokedAndReset();
+
+        policy.refreshOnWriteIfRequired(refreshCallback);
+        refreshCallback.assertNotInvokedAndReset();
+    }
+
+    @Test
+    public void dirtyAndNotElapsedTimed() throws Exception{
+        clock.waitUntil(System.currentTimeMillis());
+        TimedRefreshPolicy policy = new TimedRefreshPolicy(clock, TimeUnit.SECONDS, 1);
+
+        policy.updated();
+        policy.refreshOnWriteIfRequired(refreshCallback);
+        refreshCallback.assertInvokedAndReset();
+
+        policy.refreshOnWriteIfRequired(refreshCallback);
+        refreshCallback.assertNotInvokedAndReset();
+
+        policy.updated();
+        //Given time has not elapsed it should still be false
+        policy.refreshOnWriteIfRequired(refreshCallback);
+        refreshCallback.assertNotInvokedAndReset();
+    }
+
+    @Test
+    public void dirtyAndElapsedTime() throws Exception{
+        clock.waitUntil(System.currentTimeMillis());
+        TimedRefreshPolicy policy = new TimedRefreshPolicy(clock, TimeUnit.SECONDS, 1);
+
+        policy.updated();
+        policy.refreshOnWriteIfRequired(refreshCallback);
+        refreshCallback.assertInvokedAndReset();
+
+        policy.refreshOnWriteIfRequired(refreshCallback);
+        refreshCallback.assertNotInvokedAndReset();
+
+        policy.updated();
+        //Given time has not elapsed it should still be false
+        //in both reader and writer mode
+        policy.refreshOnWriteIfRequired(refreshCallback);
+        refreshCallback.assertNotInvokedAndReset();
+
+        policy.refreshOnWriteIfRequired(refreshCallback);
+        refreshCallback.assertNotInvokedAndReset();
+
+        //Let the refresh delta time elapse
+        long refreshDelta = TimeUnit.SECONDS.toMillis(1) + 1;
+        clock.waitUntil(System.currentTimeMillis() + refreshDelta);
+
+        policy.refreshOnWriteIfRequired(refreshCallback);
+        refreshCallback.assertInvokedAndReset();
+
+        policy.refreshOnWriteIfRequired(refreshCallback);
+        refreshCallback.assertNotInvokedAndReset();
+
+        policy.updated();
+        //Do similar check for read
+        clock.waitUntil(clock.getTime() + refreshDelta);
+
+        policy.refreshOnReadIfRequired(refreshCallback);
+        refreshCallback.assertInvokedAndReset();
+
+        policy.refreshOnReadIfRequired(refreshCallback);
+        refreshCallback.assertNotInvokedAndReset();
+    }
+}
\ No newline at end of file
diff --git a/oak-run/src/main/java/org/apache/jackrabbit/oak/benchmark/AbstractTest.java b/oak-run/src/main/java/org/apache/jackrabbit/oak/benchmark/AbstractTest.java
index 16bf194..8aebbe3 100644
--- a/oak-run/src/main/java/org/apache/jackrabbit/oak/benchmark/AbstractTest.java
+++ b/oak-run/src/main/java/org/apache/jackrabbit/oak/benchmark/AbstractTest.java
@@ -434,11 +434,11 @@ protected void afterSuite() throws Exception {
      * @return context instance to be used for runTest call for the
      * current thread
      */
-    protected T prepareThreadExecutionContext() {
+    protected T prepareThreadExecutionContext() throws Exception{
         return null;
     }
 
-    protected void disposeThreadExecutionContext(T context) {
+    protected void disposeThreadExecutionContext(T context) throws Exception{
 
     }
 
diff --git a/oak-run/src/main/java/org/apache/jackrabbit/oak/benchmark/BenchmarkRunner.java b/oak-run/src/main/java/org/apache/jackrabbit/oak/benchmark/BenchmarkRunner.java
index 26d5f48..9370692 100644
--- a/oak-run/src/main/java/org/apache/jackrabbit/oak/benchmark/BenchmarkRunner.java
+++ b/oak-run/src/main/java/org/apache/jackrabbit/oak/benchmark/BenchmarkRunner.java
@@ -395,7 +395,8 @@ public static void main(String[] args) throws Exception {
             // benchmarks for oak-auth-external
             new ExternalLoginTest(numberOfUsers.value(options), numberOfGroups.value(options), expiration.value(options), dynamicMembership.value(options), autoMembership.values(options)),
             new SyncAllExternalUsersTest(numberOfUsers.value(options), numberOfGroups.value(options), expiration.value(options), dynamicMembership.value(options), autoMembership.values(options)),
-            new SyncExternalUsersTest(numberOfUsers.value(options), numberOfGroups.value(options), expiration.value(options), dynamicMembership.value(options), autoMembership.values(options), batchSize.value(options))
+            new SyncExternalUsersTest(numberOfUsers.value(options), numberOfGroups.value(options), expiration.value(options), dynamicMembership.value(options), autoMembership.values(options), batchSize.value(options)),
+            new HybridIndexTest(base.value(options))
         };
 
         Set<String> argset = Sets.newHashSet(nonOption.values(options));
diff --git a/oak-run/src/main/java/org/apache/jackrabbit/oak/benchmark/HybridIndexTest.java b/oak-run/src/main/java/org/apache/jackrabbit/oak/benchmark/HybridIndexTest.java
new file mode 100644
index 0000000..7100b2e
--- /dev/null
+++ b/oak-run/src/main/java/org/apache/jackrabbit/oak/benchmark/HybridIndexTest.java
@@ -0,0 +1,449 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.jackrabbit.oak.benchmark;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Queue;
+import java.util.Random;
+import java.util.concurrent.Executors;
+import java.util.concurrent.LinkedBlockingDeque;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.concurrent.ScheduledThreadPoolExecutor;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import javax.annotation.Nonnull;
+import javax.annotation.Nullable;
+import javax.jcr.Node;
+import javax.jcr.Repository;
+import javax.jcr.RepositoryException;
+import javax.jcr.Session;
+import javax.jcr.query.Query;
+import javax.jcr.query.QueryManager;
+import javax.jcr.query.QueryResult;
+
+import com.codahale.metrics.ConsoleReporter;
+import com.codahale.metrics.Metric;
+import com.codahale.metrics.MetricFilter;
+import com.google.common.base.Predicate;
+import com.google.common.collect.Iterators;
+import com.google.common.util.concurrent.MoreExecutors;
+import org.apache.commons.io.FileUtils;
+import org.apache.jackrabbit.oak.Oak;
+import org.apache.jackrabbit.oak.fixture.JcrCreator;
+import org.apache.jackrabbit.oak.fixture.OakRepositoryFixture;
+import org.apache.jackrabbit.oak.fixture.RepositoryFixture;
+import org.apache.jackrabbit.oak.jcr.Jcr;
+import org.apache.jackrabbit.oak.plugins.index.AsyncIndexUpdate;
+import org.apache.jackrabbit.oak.plugins.index.IndexUtils;
+import org.apache.jackrabbit.oak.plugins.index.lucene.IndexCopier;
+import org.apache.jackrabbit.oak.plugins.index.lucene.IndexTracker;
+import org.apache.jackrabbit.oak.plugins.index.lucene.LuceneIndexEditorProvider;
+import org.apache.jackrabbit.oak.plugins.index.lucene.LuceneIndexProvider;
+import org.apache.jackrabbit.oak.plugins.index.lucene.hybrid.DocumentQueue;
+import org.apache.jackrabbit.oak.plugins.index.lucene.hybrid.LocalIndexObserver;
+import org.apache.jackrabbit.oak.plugins.index.lucene.hybrid.NRTIndexFactory;
+import org.apache.jackrabbit.oak.plugins.index.lucene.reader.DefaultIndexReaderFactory;
+import org.apache.jackrabbit.oak.plugins.index.lucene.reader.LuceneIndexReaderFactory;
+import org.apache.jackrabbit.oak.plugins.index.lucene.util.IndexDefinitionBuilder;
+import org.apache.jackrabbit.oak.plugins.metric.MetricStatisticsProvider;
+import org.apache.jackrabbit.oak.spi.commit.Observer;
+import org.apache.jackrabbit.oak.spi.lifecycle.RepositoryInitializer;
+import org.apache.jackrabbit.oak.spi.mount.MountInfoProvider;
+import org.apache.jackrabbit.oak.spi.mount.Mounts;
+import org.apache.jackrabbit.oak.spi.query.QueryIndexProvider;
+import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
+import org.apache.jackrabbit.oak.spi.whiteboard.Whiteboard;
+import org.apache.jackrabbit.oak.spi.whiteboard.WhiteboardUtils;
+import org.apache.jackrabbit.oak.stats.Clock;
+import org.apache.jackrabbit.oak.stats.StatisticsProvider;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+import static java.util.Collections.singleton;
+import static org.apache.jackrabbit.oak.plugins.nodetype.NodeTypeConstants.NT_OAK_UNSTRUCTURED;
+
+public class HybridIndexTest extends AbstractTest<HybridIndexTest.TestContext> {
+
+    private static final ScheduledExecutorService executorService = MoreExecutors.getExitingScheduledExecutorService(
+            (ScheduledThreadPoolExecutor) Executors.newScheduledThreadPool(5));
+    private static final boolean metricStatsEnabled =
+            Boolean.parseBoolean(System.getProperty("metricStatsEnabled", "true"));
+    private static MetricStatisticsProvider metricStatsProvider;
+    public static final StatisticsProvider STATISTICS_PROVIDER = getStatsProvider(metricStatsEnabled);
+
+    enum Status {
+        NONE, STARTING, STARTED, STOPPING, STOPPED, ABORTED;
+
+        private int count;
+
+        public void inc(){
+            count++;
+        }
+
+        public int count(){
+            return count;
+        }
+
+        public Status next(){
+            Status[] ss = values();
+            if (ordinal() == ss.length - 1){
+                return ss[0];
+            }
+            return ss[ordinal() + 1];
+        }
+    }
+
+    private final Random random = new Random(42); //fixed seed
+    private String indexedPropName = "foo";
+    private int nodesPerIteration = Status.values().length;
+    private int numOfIndexes = Integer.getInteger("numOfIndexes", 10);
+    private int refreshDeltaMillis = Integer.getInteger("refreshDeltaMillis", 1000);
+    private int asyncInterval = Integer.getInteger("asyncInterval", 5);
+    private int queueSize = Integer.getInteger("queueSize", 1000);
+    private boolean hybridIndexEnabled = Boolean.getBoolean("hybridIndexEnabled");
+    private boolean useOakCodec = Boolean.getBoolean("useOakCodec");
+    private String indexingMode = System.getProperty("indexingMode", "nrt");
+
+    private boolean searcherEnabled = Boolean.parseBoolean(System.getProperty("searcherEnabled", "true"));
+    private File indexCopierDir;
+    private IndexCopier copier;
+    private NRTIndexFactory nrtIndexFactory;
+    private LuceneIndexProvider luceneIndexProvider;
+    private LuceneIndexEditorProvider luceneEditorProvider;
+    private DocumentQueue queue;
+    private LocalIndexObserver localIndexObserver;
+    private RepositoryInitializer indexInitializer = new PropertyIndexInitializer();
+    private TestContext defaultContext;
+    private final File workDir;
+    private Whiteboard whiteboard;
+    private Searcher searcher;
+    private Mutator mutator;
+    private final AtomicInteger indexedNodeCount = new AtomicInteger();
+    private List<TestContext> contexts = new ArrayList<>();
+
+    public HybridIndexTest(File workDir) {
+        this.workDir = workDir;
+    }
+
+    @Override
+    protected Repository[] createRepository(RepositoryFixture fixture) throws Exception {
+        if (fixture instanceof OakRepositoryFixture) {
+            return ((OakRepositoryFixture) fixture).setUpCluster(1, new JcrCreator() {
+                @Override
+                public Jcr customize(Oak oak) {
+                    Jcr jcr = new Jcr(oak);
+                    if (hybridIndexEnabled) {
+                        prepareLuceneIndexer(workDir);
+                        jcr.with((QueryIndexProvider) luceneIndexProvider)
+                                .with((Observer) luceneIndexProvider)
+                                .with(localIndexObserver)
+                                .with(luceneEditorProvider);
+                        indexInitializer = new LuceneIndexInitializer();
+                    }
+                    whiteboard = oak.getWhiteboard();
+                    jcr.with(indexInitializer);
+
+                    //Async indexing is enabled for both property and lucene
+                    //as for property it relies on counter index
+                    oak.withAsyncIndexing("async", asyncInterval);
+                    return jcr;
+                }
+            });
+        }
+        return super.createRepository(fixture);
+    }
+
+    @Override
+    public void beforeSuite() throws Exception {
+        if (hybridIndexEnabled) {
+            runAsyncIndex();
+        }
+        defaultContext = new TestContext();
+        contexts.add(defaultContext);
+        searcher = new Searcher();
+        mutator = new Mutator();
+
+        if (searcherEnabled) {
+            addBackgroundJob(searcher);
+        }
+
+        addBackgroundJob(mutator);
+    }
+
+    @Override
+    protected TestContext prepareThreadExecutionContext() throws RepositoryException {
+        TestContext ctx = new TestContext();
+        contexts.add(ctx);
+        return ctx;
+    }
+
+    @Override
+    protected void runTest() throws Exception {
+        runTest(defaultContext);
+    }
+
+    @Override
+    protected void runTest(TestContext ctx)  throws Exception {
+        //Create tree in breadth first fashion with each node having 50 children
+        Node parent = ctx.session.getNode(ctx.paths.remove());
+        Status status = Status.NONE;
+        for (int i = 0; i < nodesPerIteration; i++) {
+            Node child = parent.addNode(nextNodeName());
+            child.setProperty(indexedPropName, status.name());
+            ctx.session.save();
+            ctx.paths.add(child.getPath());
+            indexedNodeCount.incrementAndGet();
+            status.inc();
+            status = status.next();
+        }
+    }
+
+    @Override
+    protected void disposeThreadExecutionContext(TestContext context) throws RepositoryException {
+        context.dispose();
+    }
+
+    @Override
+    protected void afterSuite() throws Exception {
+        if (hybridIndexEnabled){
+            //TODO This to avoid issue with Indexing still running post afterSuite call
+        //To handle this properly we would need a callback after repository shutdown
+            //and before NodeStore teardown
+            getAsyncIndexUpdate().close();
+            queue.close();
+            nrtIndexFactory.close();
+        }
+
+        if (indexCopierDir != null) {
+            FileUtils.deleteDirectory(indexCopierDir);
+        }
+        System.out.printf("numOfIndexes: %d, refreshDeltaMillis: %d, asyncInterval: %d, queueSize: %d , " +
+                        "hybridIndexEnabled: %s, metricStatsEnabled: %s, indexingMode: %s, " +
+                        "useOakCodec: %s %n",
+                numOfIndexes, refreshDeltaMillis, asyncInterval, queueSize, hybridIndexEnabled,
+                metricStatsEnabled, indexingMode, useOakCodec);
+        System.out.printf("Searcher: %d, Mutator: %d, indexedNodeCount: %d %n", searcher.resultSize,
+                mutator.mutationCount, indexedNodeCount.get());
+
+        dumpStats();
+    }
+
+    private void dumpStats() {
+        //Metric stats are optional; nothing to report without them
+        if (!metricStatsEnabled) {
+            return;
+        }
+        //Restrict the console report to the hybrid index and merge metrics
+        MetricFilter hybridMetricsOnly = new MetricFilter() {
+            @Override
+            public boolean matches(String name, Metric metric) {
+                return name.startsWith("HYBRID") || name.startsWith("DOCUMENT_NS_MERGE");
+            }
+        };
+        ConsoleReporter reporter = ConsoleReporter.forRegistry(metricStatsProvider.getRegistry())
+                .outputTo(System.out)
+                .filter(hybridMetricsOnly)
+                .build();
+        reporter.report();
+    }
+
+    protected class TestContext {
+        final Session session = loginWriter();
+        final Queue<String> paths = new LinkedBlockingDeque<>();
+
+        final Node dump;
+
+        public TestContext() throws RepositoryException {
+            dump = session.getRootNode()
+                    .addNode(nextNodeName(), NT_OAK_UNSTRUCTURED)
+                    .addNode(nextNodeName(), NT_OAK_UNSTRUCTURED)
+                    .addNode(nextNodeName(), NT_OAK_UNSTRUCTURED)
+                    .addNode(nextNodeName(), NT_OAK_UNSTRUCTURED)
+                    .addNode(nextNodeName(), NT_OAK_UNSTRUCTURED)
+                    .addNode(nextNodeName(), NT_OAK_UNSTRUCTURED);
+            session.save();
+            paths.add(dump.getPath());
+        }
+
+        public void dispose() throws RepositoryException {
+            dump.remove();
+            session.logout();
+        }
+    }
+
+    private String randomStatus() {
+        //Pick a status uniformly at random, bump its usage counter and
+        //return its name (used as the query bind value)
+        Status[] allStatuses = Status.values();
+        Status picked = allStatuses[random.nextInt(allStatuses.length)];
+        picked.inc();
+        return picked.name();
+    }
+
+    /**
+     * Wires up the lucene/hybrid indexing stack: index copier, NRT index
+     * factory, tracker, query provider, editor provider, document queue and
+     * the local index observer.
+     */
+    private void prepareLuceneIndexer(File workDir) {
+        try {
+            indexCopierDir = createTemporaryFolderIn(workDir);
+            copier = new IndexCopier(executorService, indexCopierDir);
+        } catch (IOException e) {
+            throw new RuntimeException(e);
+        }
+
+        //NOTE(review): refreshDeltaMillis is truncated to whole seconds here;
+        //values below 1000ms become 0 - confirm NRTIndexFactory expects seconds
+        nrtIndexFactory = new NRTIndexFactory(copier, Clock.SIMPLE,
+                TimeUnit.MILLISECONDS.toSeconds(refreshDeltaMillis));
+        MountInfoProvider mip = Mounts.defaultMountInfoProvider();
+        LuceneIndexReaderFactory indexReaderFactory = new DefaultIndexReaderFactory(mip, copier);
+        IndexTracker tracker = new IndexTracker(indexReaderFactory, nrtIndexFactory);
+        luceneIndexProvider = new LuceneIndexProvider(tracker);
+        luceneEditorProvider = new LuceneIndexEditorProvider(copier,
+                tracker,
+                null, //extractedTextCache
+                null, //augmentorFactory
+                mip);
+
+        StatisticsProvider sp = STATISTICS_PROVIDER;
+        queue = new DocumentQueue(queueSize, tracker, executorService, sp);
+        localIndexObserver = new LocalIndexObserver(queue, sp);
+    }
+
+    /**
+     * Triggers one synchronous run of the async index update. Fails via
+     * checkNotNull if no AsyncIndexUpdate is registered on the whiteboard.
+     */
+    private void runAsyncIndex() {
+        checkNotNull(getAsyncIndexUpdate()).run();
+    }
+
+    /**
+     * Looks up the AsyncIndexUpdate background task among the Runnables
+     * registered on the whiteboard.
+     *
+     * @return the registered AsyncIndexUpdate; presumably {@code null} when
+     *         none is registered (runAsyncIndex guards with checkNotNull) -
+     *         callers should handle the null case
+     */
+    private AsyncIndexUpdate getAsyncIndexUpdate() {
+        return (AsyncIndexUpdate)WhiteboardUtils.getService(whiteboard, Runnable.class, new Predicate<Runnable>() {
+                @Override
+                public boolean apply(@Nullable Runnable input) {
+                    return input instanceof AsyncIndexUpdate;
+                }
+            });
+    }
+
+    /**
+     * Creates a uniquely named sub directory of {@code parentFolder}.
+     *
+     * <p>Uses File.createTempFile to reserve a unique name, then swaps the
+     * file for a directory. Failures of delete/mkdir are reported instead of
+     * being silently ignored (the previous code dropped both return values,
+     * so a failure produced a plain file or a missing directory).
+     *
+     * @param parentFolder directory in which to create the folder
+     * @return the newly created directory
+     * @throws IOException if the unique name cannot be reserved or the
+     *         directory cannot be created
+     */
+    private static File createTemporaryFolderIn(File parentFolder) throws IOException {
+        File createdFolder = File.createTempFile("oak-", "", parentFolder);
+        if (!createdFolder.delete()) {
+            throw new IOException("Unable to delete temporary file " + createdFolder);
+        }
+        if (!createdFolder.mkdir()) {
+            throw new IOException("Unable to create temporary folder " + createdFolder);
+        }
+        return createdFolder;
+    }
+
+    /**
+     * Returns the statistics provider for the run: a MetricStatisticsProvider
+     * when metric stats are enabled, otherwise the NOOP provider.
+     *
+     * <p>Side effect: stores the metric provider in the static
+     * {@code metricStatsProvider} field so dumpStats() can reach its registry.
+     */
+    private static StatisticsProvider getStatsProvider(boolean metricStatsEnabled){
+        StatisticsProvider sp = StatisticsProvider.NOOP;
+        if (metricStatsEnabled) {
+            //first argument is null - presumably skips JMX/MBean
+            //registration; confirm against MetricStatisticsProvider's ctor
+            metricStatsProvider = new MetricStatisticsProvider(null, executorService);
+            sp = metricStatsProvider;
+        }
+        return sp;
+    }
+
+
+    private class PropertyIndexInitializer implements RepositoryInitializer {
+
+        @Override
+        public void initialize(@Nonnull NodeBuilder builder) {
+            //One property index for the primary prop plus (numOfIndexes - 1)
+            //extra ones with a numeric suffix
+            NodeBuilder oakIndex = IndexUtils.getOrCreateOakIndex(builder);
+            addPropIndexDefn(oakIndex, indexedPropName);
+            for (int suffix = 0; suffix < numOfIndexes - 1; suffix++) {
+                addPropIndexDefn(oakIndex, indexedPropName + suffix);
+            }
+        }
+
+        private void addPropIndexDefn(NodeBuilder parent, String propName){
+            //createIndexDefinition throws a checked exception that the
+            //RepositoryInitializer contract does not allow - wrap it
+            try {
+                IndexUtils.createIndexDefinition(parent, propName, false,
+                        singleton(propName), null, "property", null);
+            } catch (RepositoryException e) {
+                throw new RuntimeException(e);
+            }
+        }
+    }
+
+    private class LuceneIndexInitializer implements RepositoryInitializer {
+        @Override
+        public void initialize(@Nonnull NodeBuilder builder) {
+            NodeBuilder oakIndex = IndexUtils.getOrCreateOakIndex(builder);
+
+            //Single lucene index covering the primary property plus the
+            //numbered extra properties, run in the configured indexing mode
+            IndexDefinitionBuilder idxDefn = new IndexDefinitionBuilder();
+            idxDefn.evaluatePathRestrictions();
+            idxDefn.async("async", indexingMode);
+            idxDefn.indexRule("nt:base").property(indexedPropName).propertyIndex();
+            if (useOakCodec) {
+                idxDefn.codec("oakCodec");
+            }
+
+            for (int suffix = 0; suffix < numOfIndexes - 1; suffix++) {
+                idxDefn.indexRule("nt:base").property(indexedPropName + suffix).propertyIndex();
+            }
+
+            oakIndex.setChildNode(indexedPropName, idxDefn.build());
+        }
+    }
+
+    private class Searcher implements Runnable {
+        final Session session = loginWriter();
+        //Written only by the background job thread, read by afterSuite on the
+        //main thread - volatile for cross-thread visibility (single writer,
+        //so the non-atomic += is still safe)
+        volatile int resultSize = 0;
+        @Override
+        public void run() {
+            try{
+                run0();
+            } catch (RepositoryException e) {
+                throw new RuntimeException(e);
+            }
+        }
+
+        /**
+         * Runs one property-constraint query against a random status value
+         * and accumulates the (capped) result size.
+         */
+        private void run0() throws RepositoryException {
+            //Pick up the latest repository state before querying
+            session.refresh(false);
+            QueryManager qm = session.getWorkspace().getQueryManager();
+            Query q = qm.createQuery("select * from [nt:base] where [" + indexedPropName + "] = $status", Query.JCR_SQL2);
+            q.bindValue("status", session.getValueFactory().createValue(randomStatus()));
+            QueryResult result = q.execute();
+
+            //With property index at time traversing index wins (somehow reporting lower cost)
+            //and that leads to warning. So limit the iterator size
+            resultSize += Iterators.size(Iterators.limit(result.getNodes(), 500));
+        }
+    }
+
+    private class Mutator implements Runnable {
+        final Session session = loginWriter();
+        //Written only by the background job thread, read by afterSuite on the
+        //main thread - volatile for cross-thread visibility (single writer,
+        //so the non-atomic ++ is still safe)
+        volatile int mutationCount = 0;
+        @Override
+        public void run() {
+            try{
+                run0();
+            } catch (RepositoryException e) {
+                throw new RuntimeException(e);
+            }
+        }
+
+        /**
+         * Advances the status property of one existing node to its next
+         * value, exercising index updates for modified (not just new) nodes.
+         */
+        private void run0() throws RepositoryException {
+            TestContext ctx = contexts.get(random.nextInt(contexts.size()));
+            //peek() (not remove()) - the head path stays queued for the tree
+            //growth in runTest, so repeated runs may mutate the same node
+            String path = ctx.paths.peek();
+            session.refresh(false);
+            if (path != null){
+                Node node = session.getNode(path);
+                if(node.hasProperty(indexedPropName)){
+                    String value = node.getProperty(indexedPropName).getString();
+                    String newValue = Status.valueOf(value).next().name();
+                    node.setProperty(indexedPropName, newValue);
+                    session.save();
+                    mutationCount++;
+                }
+            }
+        }
+    }
+}
