Index: oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/BufferedOakDirectory.java =================================================================== --- oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/BufferedOakDirectory.java (nonexistent) +++ oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/BufferedOakDirectory.java (working copy) @@ -0,0 +1,203 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.jackrabbit.oak.plugins.index.lucene; + +import java.io.FileNotFoundException; +import java.io.IOException; +import java.util.Collection; +import java.util.Set; + +import javax.annotation.Nonnull; +import javax.annotation.Nullable; + +import com.google.common.collect.Sets; + +import org.apache.jackrabbit.oak.plugins.index.lucene.OakDirectory.BlobFactory; +import org.apache.jackrabbit.oak.spi.blob.BlobStore; +import org.apache.jackrabbit.oak.spi.state.NodeBuilder; +import org.apache.lucene.store.Directory; +import org.apache.lucene.store.IOContext; +import org.apache.lucene.store.IndexInput; +import org.apache.lucene.store.IndexOutput; +import org.apache.lucene.store.Lock; +import org.apache.lucene.store.LockFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import static com.google.common.base.Preconditions.checkNotNull; +import static java.util.Arrays.asList; +import static org.apache.jackrabbit.oak.plugins.memory.EmptyNodeState.EMPTY_NODE; +import static org.apache.jackrabbit.oak.plugins.memory.ModifiedNodeState.squeeze; + +/** + * A directory implementation that buffers changes until {@link #close()}, + * except for blob values. Those are written immediately to the store. + */ +public final class BufferedOakDirectory extends Directory { + + private static final Logger LOG = LoggerFactory.getLogger(BufferedOakDirectory.class); + + private final BlobFactory blobFactory; + + private final String dataNodeName; + + private final IndexDefinition definition; + + private final OakDirectory base; + + private final Set<String> bufferedForDelete = Sets.newConcurrentHashSet(); + + private NodeBuilder bufferedBuilder = EMPTY_NODE.builder(); + + private OakDirectory buffered; + + private int deleteCount; + + public BufferedOakDirectory(@Nonnull NodeBuilder builder, + @Nonnull String dataNodeName, + @Nonnull IndexDefinition definition, + @Nullable BlobStore blobStore) { + this.blobFactory = blobStore != null ?
+ new OakDirectory.BlobStoreBlobFactory(blobStore) : + new OakDirectory.NodeBuilderBlobFactory(builder); + this.dataNodeName = checkNotNull(dataNodeName); + this.definition = checkNotNull(definition); + this.base = new OakDirectory(checkNotNull(builder), dataNodeName, + definition, false, blobFactory); + reopenBuffered(); + } + + @Override + public String[] listAll() throws IOException { + Set<String> all = Sets.newTreeSet(); + all.addAll(asList(base.listAll())); + all.addAll(asList(buffered.listAll())); + all.removeAll(bufferedForDelete); + return all.toArray(new String[all.size()]); + } + + @Override + public boolean fileExists(String name) throws IOException { + if (bufferedForDelete.contains(name)) { + return false; + } + return buffered.fileExists(name) || base.fileExists(name); + } + + @Override + public void deleteFile(String name) throws IOException { + LOG.debug("deleteFile({})", name); + if (base.fileExists(name)) { + bufferedForDelete.add(name); + } + if (buffered.fileExists(name)) { + buffered.deleteFile(name); + fileDeleted(); + } + } + + @Override + public long fileLength(String name) throws IOException { + if (bufferedForDelete.contains(name)) { + throw new FileNotFoundException(name); + } + Directory dir = base; + if (buffered.fileExists(name)) { + dir = buffered; + } + return dir.fileLength(name); + } + + @Override + public IndexOutput createOutput(String name, IOContext context) + throws IOException { + LOG.debug("createOutput({})", name); + bufferedForDelete.remove(name); + return buffered.createOutput(name, context); + } + + @Override + public void sync(Collection<String> names) throws IOException { + buffered.sync(names); + base.sync(names); + } + + @Override + public IndexInput openInput(String name, IOContext context) + throws IOException { + LOG.debug("openInput({})", name); + if (bufferedForDelete.contains(name)) { + throw new FileNotFoundException(name); + } + Directory dir = base; + if (buffered.fileExists(name)) { + dir = buffered; + } + return
dir.openInput(name, context); + } + + @Override + public Lock makeLock(String name) { + return base.makeLock(name); + } + + @Override + public void clearLock(String name) throws IOException { + base.clearLock(name); + } + + @Override + public void close() throws IOException { + LOG.debug("close()"); + buffered.close(); + // copy buffered files to base + for (String name : buffered.listAll()) { + buffered.copy(base, name); + } + // remove files marked as deleted + for (String name : bufferedForDelete) { + base.deleteFile(name); + } + base.close(); + } + + @Override + public void setLockFactory(LockFactory lockFactory) throws IOException { + base.setLockFactory(lockFactory); + } + + @Override + public LockFactory getLockFactory() { + return base.getLockFactory(); + } + + private void fileDeleted() throws IOException { + // get rid of non existing files once in a while + if (++deleteCount >= 100) { + buffered.close(); + reopenBuffered(); + } + } + + private void reopenBuffered() { + // squeeze out child nodes marked as non existing + // those are files that were created and later deleted again + bufferedBuilder = squeeze(bufferedBuilder.getNodeState()).builder(); + buffered = new OakDirectory(bufferedBuilder, dataNodeName, + definition, false, blobFactory); + } +} Property changes on: oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/BufferedOakDirectory.java ___________________________________________________________________ Added: svn:eol-style ## -0,0 +1 ## +native \ No newline at end of property Index: oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/OakDirectory.java =================================================================== --- oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/OakDirectory.java (revision 1773940) +++ oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/OakDirectory.java (working copy) @@ -28,6 +28,7 @@ import java.util.List; import java.util.Set; 
+import javax.annotation.Nonnull; import javax.annotation.Nullable; import com.google.common.collect.ImmutableSet; @@ -40,6 +41,7 @@ import org.apache.jackrabbit.oak.commons.StringUtils; import org.apache.jackrabbit.oak.plugins.blob.BlobStoreBlob; import org.apache.jackrabbit.oak.spi.blob.BlobOptions; +import org.apache.jackrabbit.oak.spi.blob.BlobStore; import org.apache.jackrabbit.oak.spi.blob.GarbageCollectableBlobStore; import org.apache.jackrabbit.oak.spi.state.NodeBuilder; import org.apache.jackrabbit.oak.util.PerfLogger; @@ -65,6 +67,7 @@ import static org.apache.jackrabbit.oak.api.Type.BINARIES; import static org.apache.jackrabbit.oak.api.Type.STRINGS; import static org.apache.jackrabbit.oak.plugins.index.lucene.LuceneIndexConstants.INDEX_DATA_CHILD_NAME; +import static org.apache.jackrabbit.oak.plugins.memory.EmptyNodeState.EMPTY_NODE; import static org.apache.jackrabbit.oak.plugins.memory.PropertyStates.createProperty; import static org.apache.jackrabbit.oak.spi.blob.BlobOptions.UploadType.SYNCHRONOUS; @@ -92,20 +95,25 @@ private final Set fileNamesAtStart; private final boolean activeDeleteEnabled; private final String indexName; - @Nullable - private final GarbageCollectableBlobStore blobStore; + private final BlobFactory blobFactory; private volatile boolean dirty; public OakDirectory(NodeBuilder builder, IndexDefinition definition, boolean readOnly) { - this(builder, INDEX_DATA_CHILD_NAME, definition, readOnly, null); + this(builder, INDEX_DATA_CHILD_NAME, definition, readOnly); } public OakDirectory(NodeBuilder builder, String dataNodeName, IndexDefinition definition, boolean readOnly) { - this(builder, dataNodeName, definition, readOnly, null); + this(builder, dataNodeName, definition, readOnly, new NodeBuilderBlobFactory(builder)); } public OakDirectory(NodeBuilder builder, String dataNodeName, IndexDefinition definition, - boolean readOnly, @Nullable GarbageCollectableBlobStore blobStore) { + boolean readOnly, @Nullable GarbageCollectableBlobStore 
blobStore) { + this(builder, dataNodeName, definition, readOnly, + blobStore != null ? new BlobStoreBlobFactory(blobStore) : new NodeBuilderBlobFactory(builder)); + } + + public OakDirectory(NodeBuilder builder, String dataNodeName, IndexDefinition definition, + boolean readOnly, BlobFactory blobFactory) { this.lockFactory = NoLockFactory.getNoLockFactory(); this.builder = builder; this.directoryBuilder = readOnly ? builder.getChildNode(dataNodeName) : builder.child(dataNodeName); @@ -115,7 +123,7 @@ this.fileNamesAtStart = ImmutableSet.copyOf(this.fileNames); this.activeDeleteEnabled = definition.getActiveDeleteEnabled(); this.indexName = definition.getIndexName(); - this.blobStore = blobStore; + this.blobFactory = blobFactory; } @Override @@ -161,7 +169,7 @@ @Override public long fileLength(String name) throws IOException { NodeBuilder file = directoryBuilder.getChildNode(name); - OakIndexInput input = new OakIndexInput(name, file, indexName, blobStore); + OakIndexInput input = new OakIndexInput(name, file, indexName, blobFactory); try { return input.length(); } finally { @@ -186,7 +194,7 @@ } fileNames.add(name); markDirty(); - return new OakIndexOutput(name, file, indexName, blobStore); + return new OakIndexOutput(name, file, indexName, blobFactory); } @@ -195,7 +203,7 @@ throws IOException { NodeBuilder file = directoryBuilder.getChildNode(name); if (file.exists()) { - return new OakIndexInput(name, file, indexName, blobStore); + return new OakIndexInput(name, file, indexName, blobFactory); } else { String msg = String.format("[%s] %s", indexName, name); throw new FileNotFoundException(msg); @@ -241,6 +249,38 @@ return "Directory for " + definition.getIndexName(); } + /** + * Copies the file with the given {@code name} to the {@code dest} + * directory. The file is copied 'by reference'. That is, the file in the + * destination directory will reference the same blob values as the source + * file. + *
<p>
+ * This method is a no-op if the file does not exist in this directory. + * + * @param dest the destination directory. + * @param name the name of the file to copy. + * @throws IOException if an error occurs while copying the file. + * @throws IllegalArgumentException if the destination directory does not + * use the same {@link BlobFactory} as {@code this} directory. + */ + void copy(OakDirectory dest, String name) + throws IOException { + if (blobFactory != dest.blobFactory) { + throw new IllegalArgumentException("Source and destination " + + "directory must reference the same BlobFactory"); + } + NodeBuilder file = directoryBuilder.getChildNode(name); + if (file.exists()) { + // overwrite potentially already existing child + NodeBuilder destFile = dest.directoryBuilder.setChildNode(name, EMPTY_NODE); + for (PropertyState p : file.getProperties()) { + destFile.setProperty(p); + } + dest.fileNames.add(name); + dest.markDirty(); + } + } + public boolean isDirty() { return dirty; } @@ -340,10 +380,10 @@ private final String dirDetails; - private final GarbageCollectableBlobStore blobStore; + private final BlobFactory blobFactory; public OakIndexFile(String name, NodeBuilder file, String dirDetails, - @Nullable GarbageCollectableBlobStore blobStore) { + @Nonnull BlobFactory blobFactory) { this.name = name; this.file = file; this.dirDetails = dirDetails; @@ -350,7 +390,7 @@ this.blobSize = determineBlobSize(file); this.uniqueKey = readUniqueKey(file); this.blob = new byte[blobSize]; - this.blobStore = blobStore; + this.blobFactory = checkNotNull(blobFactory); PropertyState property = file.getProperty(JCR_DATA); if (property != null && property.getType() == BINARIES) { @@ -381,7 +421,7 @@ this.length = that.length; this.data = newArrayList(that.data); this.dataModified = that.dataModified; - this.blobStore = that.blobStore; + this.blobFactory = that.blobFactory; } private void loadBlob(int i) throws IOException { @@ -410,7 +450,7 @@ new 
ByteArrayInputStream(uniqueKey)); } - Blob b = writeBlob(in); + Blob b = blobFactory.createBlob(in); if (index < data.size()) { data.set(index, b); } else { @@ -422,25 +462,6 @@ } } - /** - * Writes the blob to the blobstore directly if available. - * - * @param in input stream - * @return - * @throws IOException - */ - private Blob writeBlob(InputStream in) throws IOException { - if (blobStore != null) { - if (!ENABLE_AYNC_DS) { - return new BlobStoreBlob(blobStore, - blobStore.writeBlob(in, new BlobOptions().setUpload(SYNCHRONOUS))); - } else { - return new BlobStoreBlob(blobStore, blobStore.writeBlob(in)); - } - } - return file.createBlob(in); - } - public void seek(long pos) throws IOException { // seek() may be called with pos == length // see https://issues.apache.org/jira/browse/LUCENE-1196 @@ -558,10 +579,10 @@ private final String dirDetails; public OakIndexInput(String name, NodeBuilder file, String dirDetails, - @Nullable GarbageCollectableBlobStore blobStore) { + BlobFactory blobFactory) { super(name); this.dirDetails = dirDetails; - this.file = new OakIndexFile(name, file, dirDetails, blobStore); + this.file = new OakIndexFile(name, file, dirDetails, blobFactory); clones = WeakIdentityMap.newConcurrentHashMap(); } @@ -641,9 +662,10 @@ private final String dirDetails; private final OakIndexFile file; - public OakIndexOutput(String name, NodeBuilder file, String dirDetails, GarbageCollectableBlobStore blobStore) throws IOException { + public OakIndexOutput(String name, NodeBuilder file, String dirDetails, + BlobFactory blobFactory) throws IOException { this.dirDetails = dirDetails; - this.file = new OakIndexFile(name, file, dirDetails, blobStore); + this.file = new OakIndexFile(name, file, dirDetails, blobFactory); } @Override @@ -699,4 +721,42 @@ } + interface BlobFactory { + + Blob createBlob(InputStream in) throws IOException; + } + + static final class NodeBuilderBlobFactory implements BlobFactory { + + private final NodeBuilder builder; + + 
NodeBuilderBlobFactory(NodeBuilder builder) { + this.builder = builder; + } + + @Override + public Blob createBlob(InputStream in) throws IOException { + return builder.createBlob(in); + } + } + + static final class BlobStoreBlobFactory implements BlobFactory { + + private final BlobStore store; + + BlobStoreBlobFactory(BlobStore store) { + this.store = store; + } + + @Override + public Blob createBlob(InputStream in) throws IOException { + String blobId; + if (!ENABLE_AYNC_DS) { + blobId = store.writeBlob(in, new BlobOptions().setUpload(SYNCHRONOUS)); + } else { + blobId = store.writeBlob(in); + } + return new BlobStoreBlob(store, blobId); + } + } } Index: oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/writer/DefaultIndexWriter.java =================================================================== --- oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/writer/DefaultIndexWriter.java (revision 1773940) +++ oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/writer/DefaultIndexWriter.java (working copy) @@ -28,6 +28,7 @@ import org.apache.jackrabbit.oak.api.PropertyState; import org.apache.jackrabbit.oak.api.Type; +import org.apache.jackrabbit.oak.plugins.index.lucene.BufferedOakDirectory; import org.apache.jackrabbit.oak.plugins.index.lucene.IndexCopier; import org.apache.jackrabbit.oak.plugins.index.lucene.IndexDefinition; import org.apache.jackrabbit.oak.plugins.index.lucene.OakDirectory; @@ -153,7 +154,7 @@ private IndexWriter getWriter() throws IOException { if (writer == null) { final long start = PERF_LOGGER.start(); - directory = newIndexDirectory(definition, definitionBuilder, dirName, blobStore); + directory = newIndexDirectory(definition, definitionBuilder, dirName, indexCopier != null, blobStore); IndexWriterConfig config; if (indexCopier != null){ directory = indexCopier.wrapForWrite(definition, directory, reindex, dirName); Index: 
oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/writer/IndexWriterUtils.java =================================================================== --- oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/writer/IndexWriterUtils.java (revision 1773940) +++ oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/writer/IndexWriterUtils.java (working copy) @@ -26,6 +26,7 @@ import javax.annotation.Nullable; +import org.apache.jackrabbit.oak.plugins.index.lucene.BufferedOakDirectory; import org.apache.jackrabbit.oak.plugins.index.lucene.FieldNames; import org.apache.jackrabbit.oak.plugins.index.lucene.IndexDefinition; import org.apache.jackrabbit.oak.plugins.index.lucene.LuceneIndexConstants; @@ -74,7 +75,8 @@ } public static Directory newIndexDirectory(IndexDefinition indexDefinition, - NodeBuilder definition, String dirName, @Nullable GarbageCollectableBlobStore blobStore) + NodeBuilder definition, String dirName, boolean buffered, + @Nullable GarbageCollectableBlobStore blobStore) throws IOException { String path = null; if (LuceneIndexConstants.PERSISTENCE_FILE.equalsIgnoreCase( @@ -82,7 +84,11 @@ path = definition.getString(PERSISTENCE_PATH); } if (path == null) { - return new OakDirectory(definition, dirName, indexDefinition, false, blobStore); + if (buffered) { + return new BufferedOakDirectory(definition, dirName, indexDefinition, blobStore); + } else { + return new OakDirectory(definition, dirName, indexDefinition, false, blobStore); + } } else { // try { File file = new File(path); Index: oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/BufferedOakDirectoryTest.java =================================================================== --- oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/BufferedOakDirectoryTest.java (nonexistent) +++ oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/BufferedOakDirectoryTest.java (working copy) @@ -0,0 
+1,127 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.jackrabbit.oak.plugins.index.lucene; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Random; + +import com.google.common.collect.Sets; + +import org.apache.commons.io.FileUtils; +import org.apache.jackrabbit.oak.plugins.memory.EmptyNodeState; +import org.apache.jackrabbit.oak.spi.state.NodeBuilder; +import org.apache.jackrabbit.oak.spi.state.NodeState; +import org.apache.lucene.store.Directory; +import org.apache.lucene.store.IOContext; +import org.apache.lucene.store.IndexInput; +import org.apache.lucene.store.IndexOutput; +import org.junit.Test; + +import static org.apache.jackrabbit.oak.plugins.index.lucene.LuceneIndexConstants.INDEX_DATA_CHILD_NAME; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +public class BufferedOakDirectoryTest { + + private Random rnd = new Random(); + + private NodeState root = EmptyNodeState.EMPTY_NODE; + + private NodeBuilder builder = root.builder(); + + @Test + public void createOutput() throws Exception { + Directory buffered = createDir(builder, true); + byte[] data = writeFile(buffered, 
"file"); + + // must not be visible yet in base + Directory base = createDir(builder, false); + assertFalse(base.fileExists("file")); + base.close(); + + buffered.close(); + + // now it must exist + base = createDir(builder, false); + assertFile(base, "file", data); + base.close(); + } + + @Test + public void listAll() throws Exception { + Directory buffered = createDir(builder, true); + writeFile(buffered, "file"); + + // must only show up after buffered is closed + Directory base = createDir(builder, false); + assertEquals(0, base.listAll().length); + base.close(); + buffered.close(); + base = createDir(builder, false); + assertEquals(Sets.newHashSet("file"), Sets.newHashSet(base.listAll())); + base.close(); + + buffered = createDir(builder, true); + buffered.deleteFile("file"); + assertEquals(0, buffered.listAll().length); + + // must only disappear after buffered is closed + base = createDir(builder, false); + assertEquals(Sets.newHashSet("file"), Sets.newHashSet(base.listAll())); + base.close(); + buffered.close(); + base = createDir(builder, false); + assertEquals(0, base.listAll().length); + base.close(); + } + + private void assertFile(Directory dir, String file, byte[] expected) + throws IOException { + assertTrue(dir.fileExists(file)); + assertEquals(expected.length, dir.fileLength(file)); + IndexInput in = dir.openInput(file, IOContext.DEFAULT); + byte[] data = new byte[expected.length]; + in.readBytes(data, 0, data.length); + in.close(); + assertTrue(Arrays.equals(expected, data)); + } + + private Directory createDir(NodeBuilder builder, boolean buffered) { + IndexDefinition def = new IndexDefinition(root, builder.getNodeState(), "/foo"); + if (buffered) { + return new BufferedOakDirectory(builder, INDEX_DATA_CHILD_NAME, def, null); + } else { + return new OakDirectory(builder, def,false); + } + } + + private byte[] randomBytes(int size) { + byte[] data = new byte[size]; + rnd.nextBytes(data); + return data; + } + + private byte[] writeFile(Directory 
dir, String name) throws IOException { + byte[] data = randomBytes(rnd.nextInt((int) (16 * FileUtils.ONE_KB))); + IndexOutput out = dir.createOutput(name, IOContext.DEFAULT); + out.writeBytes(data, data.length); + out.close(); + return data; + } +} Property changes on: oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/BufferedOakDirectoryTest.java ___________________________________________________________________ Added: svn:eol-style ## -0,0 +1 ## +native \ No newline at end of property Index: oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/LuceneIndexTest.java =================================================================== --- oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/LuceneIndexTest.java (revision 1773940) +++ oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/LuceneIndexTest.java (working copy) @@ -371,13 +371,13 @@ } private void purgeDeletedDocs(NodeBuilder idx, IndexDefinition definition) throws IOException { - IndexWriter writer = new IndexWriter(newIndexDirectory(definition, idx, LuceneIndexConstants.INDEX_DATA_CHILD_NAME, null), getIndexWriterConfig(definition, true)); + IndexWriter writer = new IndexWriter(newIndexDirectory(definition, idx, LuceneIndexConstants.INDEX_DATA_CHILD_NAME, false, null), getIndexWriterConfig(definition, true)); writer.forceMergeDeletes(); writer.close(); } public int getDeletedDocCount(NodeBuilder idx, IndexDefinition definition) throws IOException { - IndexReader reader = DirectoryReader.open(newIndexDirectory(definition, idx, LuceneIndexConstants.INDEX_DATA_CHILD_NAME, null)); + IndexReader reader = DirectoryReader.open(newIndexDirectory(definition, idx, LuceneIndexConstants.INDEX_DATA_CHILD_NAME, false, null)); int numDeletes = reader.numDeletedDocs(); reader.close(); return numDeletes; Index: oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/directory/CopyOnWriteDirectoryTest.java 
=================================================================== --- oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/directory/CopyOnWriteDirectoryTest.java (revision 1773940) +++ oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/directory/CopyOnWriteDirectoryTest.java (working copy) @@ -40,7 +40,6 @@ import org.apache.lucene.store.IndexOutput; import org.junit.After; import org.junit.Before; -import org.junit.Ignore; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; @@ -87,13 +86,12 @@ } // OAK-5238 - @Ignore @Test public void copyOnWrite() throws Exception { IndexDefinition def = new IndexDefinition(ns.getRoot(), ns.getRoot(), "/foo"); NodeBuilder builder = ns.getRoot().builder(); Directory remote = IndexWriterUtils.newIndexDirectory( - def, builder.child("foo"), INDEX_DATA_CHILD_NAME, null); + def, builder.child("foo"), INDEX_DATA_CHILD_NAME, true, null); Directory dir = copier.wrapForWrite(def, remote, false, INDEX_DATA_CHILD_NAME); addFiles(dir); writeTree(builder);