Index: lucene/src/java/org/apache/lucene/index/DirectoryReader.java
--- lucene/src/java/org/apache/lucene/index/DirectoryReader.java	Sat Nov 13 11:49:58 2010 -0500
+++ lucene/src/java/org/apache/lucene/index/DirectoryReader.java	Sat Nov 13 13:41:21 2010 -0500
@@ -320,7 +320,10 @@
     }
     buffer.append(getClass().getSimpleName());
     buffer.append('(');
-    buffer.append(segmentInfos.getCurrentSegmentFileName());
+    final String segmentsFile = segmentInfos.getCurrentSegmentFileName();
+    if (segmentsFile != null) {
+      buffer.append(segmentsFile);
+    }
     if (writer != null) {
       buffer.append(":nrt");
     }
Index: lucene/src/java/org/apache/lucene/index/IndexWriter.java
--- lucene/src/java/org/apache/lucene/index/IndexWriter.java	Sat Nov 13 11:49:58 2010 -0500
+++ lucene/src/java/org/apache/lucene/index/IndexWriter.java	Sat Nov 13 13:41:21 2010 -0500
@@ -403,20 +403,20 @@
     // this method is called:
     poolReaders = true;
 
-    flush(true, true, false);
-
     // Prevent segmentInfos from changing while opening the
     // reader; in theory we could do similar retry logic,
     // just like we do when loading segments_N
+    IndexReader r;
     synchronized(this) {
-      applyDeletes();
-      final IndexReader r = new DirectoryReader(this, segmentInfos, config.getReaderTermsIndexDivisor(), codecs);
+      flush(false, true, true);
+      r = new DirectoryReader(this, segmentInfos, config.getReaderTermsIndexDivisor(), codecs);
       if (infoStream != null) {
        message("return reader version=" + r.getVersion() + " reader=" + r);
      }
-      return r;
    }
-
+    maybeMerge();
+
+    return r;
  }
 
  /** Holds shared SegmentReader instances.  IndexWriter uses
@@ -2903,7 +2903,7 @@
    List infos = new ArrayList();
    for (Directory dir : dirs) {
      if (infoStream != null) {
-        message("process directory " + dir);
+        message("addIndexes: process directory " + dir);
      }
      SegmentInfos sis = new SegmentInfos(codecs); // read infos from dir
      sis.read(dir, codecs);
@@ -2911,13 +2911,14 @@
      for (SegmentInfo info : sis) {
        assert !infos.contains(info): "dup info dir=" + info.dir + " name=" + info.name;
 
-        if (infoStream != null) {
-          message("process segment=" + info.name);
-        }
        docCount += info.docCount;
        String newSegName = newSegmentName();
        String dsName = info.getDocStoreSegment();
 
+        if (infoStream != null) {
+          message("addIndexes: process segment origName=" + info.name + " newName=" + newSegName + " dsName=" + dsName);
+        }
+
        // Determine if the doc store of this segment needs to be copied.  It's
        // only relevant for segments who share doc store with others, because
        // the DS might have been copied already, in which case we just want
@@ -3847,6 +3848,13 @@
      }
    }
 
+    // if a mergedSegmentWarmer is installed, we must merge
+    // the doc stores because we will open a full
+    // SegmentReader on the merged segment:
+    if (!mergeDocStores && mergedSegmentWarmer != null && currentDocStoreSegment != null && lastDocStoreSegment != null && lastDocStoreSegment.equals(currentDocStoreSegment)) {
+      mergeDocStores = true;
+    }
+
    final int docStoreOffset;
    final String docStoreSegment;
    final boolean docStoreIsCompoundFile;
@@ -4107,7 +4115,16 @@
      deleter.incRef(merge.mergeFiles);
    }
 
-    if (poolReaders && mergedSegmentWarmer != null) {
+    final String currentDocStoreSegment = docWriter.getDocStoreSegment();
+
+    // canWarm would only be false if the merged segment
+    // warmer was not installed when this merge was
+    // started but then was installed part way while the
+    // merge was running; in this case, because we did not
+    // merge doc stores, we cannot call the warmer
+    final boolean canWarm = merge.info.getDocStoreSegment() == null || currentDocStoreSegment == null || !merge.info.getDocStoreSegment().equals(currentDocStoreSegment);
+
+    if (poolReaders && mergedSegmentWarmer != null && canWarm) {
      // Load terms index & doc stores so the segment
      // warmer can run searches, load documents/term
      // vectors
Index: lucene/src/test/org/apache/lucene/index/TestAddIndexes.java
--- lucene/src/test/org/apache/lucene/index/TestAddIndexes.java	Sat Nov 13 11:49:58 2010 -0500
+++ lucene/src/test/org/apache/lucene/index/TestAddIndexes.java	Sat Nov 13 13:41:21 2010 -0500
@@ -771,19 +771,34 @@
    void doBody(int j, Directory[] dirs) throws Throwable {
      switch(j%5) {
      case 0:
+        if (VERBOSE) {
+          System.out.println("TEST: " + Thread.currentThread().getName() + ": addIndexes + optimize");
+        }
        writer2.addIndexes(dirs);
        writer2.optimize();
        break;
      case 1:
+        if (VERBOSE) {
+          System.out.println("TEST: " + Thread.currentThread().getName() + ": addIndexes");
+        }
        writer2.addIndexes(dirs);
        break;
      case 2:
+        if (VERBOSE) {
+          System.out.println("TEST: " + Thread.currentThread().getName() + ": addIndexes(IR[])");
+        }
        writer2.addIndexes(readers);
        break;
      case 3:
+        if (VERBOSE) {
+          System.out.println("TEST: " + Thread.currentThread().getName() + ": optimize");
+        }
        writer2.optimize();
        break;
      case 4:
+        if (VERBOSE) {
+          System.out.println("TEST: " + Thread.currentThread().getName() + ": commit");
+        }
        writer2.commit();
      }
    }
@@ -814,15 +829,24 @@
 
    final int NUM_COPY = 50;
    CommitAndAddIndexes3 c = new CommitAndAddIndexes3(NUM_COPY);
+    if (VERBOSE) {
+      c.writer2.setInfoStream(System.out);
+    }
    c.launchThreads(-1);
 
    Thread.sleep(500);
 
    // Close w/o first stopping/joining the threads
+    if (VERBOSE) {
+      System.out.println("TEST: now close(false)");
+    }
    c.close(false);
 
    c.joinThreads();
 
+    if (VERBOSE) {
+      System.out.println("TEST: done join threads");
+    }
    _TestUtil.checkIndex(c.dir2);
 
    c.closeDir();
Index: lucene/src/test/org/apache/lucene/index/TestConcurrentMergeScheduler.java
--- lucene/src/test/org/apache/lucene/index/TestConcurrentMergeScheduler.java	Sat Nov 13 11:49:58 2010 -0500
+++ lucene/src/test/org/apache/lucene/index/TestConcurrentMergeScheduler.java	Sat Nov 13 13:41:21 2010 -0500
@@ -46,12 +46,20 @@
    public void eval(MockDirectoryWrapper dir) throws IOException {
      if (doFail && (Thread.currentThread().getName().equals("main") ||
                     Thread.currentThread().getName().equals("Main Thread"))) {
+        boolean isDoFlush = false;
+        boolean isClose = false;
        StackTraceElement[] trace = new Exception().getStackTrace();
        for (int i = 0; i < trace.length; i++) {
          if ("doFlush".equals(trace[i].getMethodName())) {
-            hitExc = true;
-            throw new IOException("now failing during flush");
+            isDoFlush = true;
          }
+          if ("close".equals(trace[i].getMethodName())) {
+            isClose = true;
+          }
+        }
+        if (isDoFlush && !isClose) {
+          hitExc = true;
+          throw new IOException("now failing during flush");
        }
      }
    }
Index: lucene/src/test/org/apache/lucene/index/TestIndexWriter.java
--- lucene/src/test/org/apache/lucene/index/TestIndexWriter.java	Sat Nov 13 11:49:58 2010 -0500
+++ lucene/src/test/org/apache/lucene/index/TestIndexWriter.java	Sat Nov 13 13:41:21 2010 -0500
@@ -21,7 +21,6 @@
 import java.io.File;
 import java.io.IOException;
 import java.io.PrintStream;
-import java.io.Reader;
 import java.io.StringReader;
 import java.util.List;
 import java.util.ArrayList;
@@ -36,12 +35,8 @@
 import java.util.concurrent.atomic.AtomicBoolean;
 
 import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.analysis.Analyzer;
-import org.apache.lucene.analysis.CachingTokenFilter;
 import org.apache.lucene.analysis.MockAnalyzer;
-import org.apache.lucene.analysis.MockTokenFilter;
 import org.apache.lucene.analysis.MockTokenizer;
-import org.apache.lucene.analysis.TokenFilter;
 import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
 import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
@@ -53,7 +48,6 @@
 import org.apache.lucene.document.Field.Store;
 import org.apache.lucene.document.Field.TermVector;
 import org.apache.lucene.index.IndexWriterConfig.OpenMode;
-import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.TopDocs;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.PhraseQuery;
@@ -64,7 +58,6 @@
 import org.apache.lucene.store.AlreadyClosedException;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.FSDirectory;
-import org.apache.lucene.store.IndexInput;
 import org.apache.lucene.store.IndexOutput;
 import org.apache.lucene.store.Lock;
 import org.apache.lucene.store.LockFactory;
@@ -76,7 +69,6 @@
 import org.apache.lucene.util._TestUtil;
 import org.apache.lucene.util.ThreadInterruptedException;
 import org.apache.lucene.util.BytesRef;
-import org.apache.lucene.util.Bits;
 
 public class TestIndexWriter extends LuceneTestCase {
 
@@ -2342,6 +2334,7 @@
    final Directory dir = newDirectory();
    final IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(
        TEST_VERSION_CURRENT, new MockAnalyzer()));
+    ((LogMergePolicy) w.getMergePolicy()).setMergeFactor(5);
    w.commit();
    final AtomicBoolean failed = new AtomicBoolean();
    Thread[] threads = new Thread[NUM_THREADS];
Index: lucene/src/test/org/apache/lucene/index/TestIndexWriterReader.java
--- lucene/src/test/org/apache/lucene/index/TestIndexWriterReader.java	Sat Nov 13 11:49:58 2010 -0500
+++ lucene/src/test/org/apache/lucene/index/TestIndexWriterReader.java	Sat Nov 13 13:41:21 2010 -0500
@@ -22,6 +22,7 @@
 import java.util.Collections;
 import java.util.List;
 import java.util.Random;
+import java.util.concurrent.atomic.AtomicBoolean;
 
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.Document;
@@ -38,7 +39,6 @@
 import org.apache.lucene.store.AlreadyClosedException;
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util._TestUtil;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.ThreadInterruptedException;
@@ -838,6 +838,7 @@
 
  public void testSegmentWarmer() throws Exception {
    Directory dir = newDirectory();
+    final AtomicBoolean didWarm = new AtomicBoolean();
    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(
        TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2).setReaderPooling(true));
    ((LogMergePolicy) w.getMergePolicy()).setMergeFactor(10);
@@ -846,6 +847,7 @@
        final IndexSearcher s = new IndexSearcher(r);
        final TopDocs hits = s.search(new TermQuery(new Term("foo", "bar")), 10);
        assertEquals(20, hits.totalHits);
+        didWarm.set(true);
      }
    });
 
@@ -857,5 +859,6 @@
    w.waitForMerges();
    w.close();
    dir.close();
+    assertTrue(didWarm.get());
  }
 }
Index: lucene/src/test/org/apache/lucene/index/TestIndexWriterWithThreads.java
--- lucene/src/test/org/apache/lucene/index/TestIndexWriterWithThreads.java	Sat Nov 13 11:49:58 2010 -0500
+++ lucene/src/test/org/apache/lucene/index/TestIndexWriterWithThreads.java	Sat Nov 13 13:41:21 2010 -0500
@@ -298,15 +298,23 @@
    public void eval(MockDirectoryWrapper dir) throws IOException {
      if (doFail) {
        StackTraceElement[] trace = new Exception().getStackTrace();
+        boolean sawAbortOrFlushDoc = false;
+        boolean sawClose = false;
        for (int i = 0; i < trace.length; i++) {
          if ("abort".equals(trace[i].getMethodName()) ||
              "flushDocument".equals(trace[i].getMethodName())) {
-            if (onlyOnce)
-              doFail = false;
-            //System.out.println(Thread.currentThread().getName() + ": now fail");
-            //new Throwable().printStackTrace(System.out);
-            throw new IOException("now failing on purpose");
+            sawAbortOrFlushDoc = true;
          }
+          if ("close".equals(trace[i].getMethodName())) {
+            sawClose = true;
+          }
+        }
+        if (sawAbortOrFlushDoc && !sawClose) {
+          if (onlyOnce)
+            doFail = false;
+          //System.out.println(Thread.currentThread().getName() + ": now fail");
+          //new Throwable().printStackTrace(System.out);
+          throw new IOException("now failing on purpose");
        }
      }
    }
Index: lucene/src/test/org/apache/lucene/search/TestCachingSpanFilter.java
--- lucene/src/test/org/apache/lucene/search/TestCachingSpanFilter.java	Sat Nov 13 11:49:58 2010 -0500
+++ lucene/src/test/org/apache/lucene/search/TestCachingSpanFilter.java	Sat Nov 13 13:41:21 2010 -0500
@@ -19,14 +19,15 @@
 
 import java.io.IOException;
 
+import org.apache.lucene.analysis.MockAnalyzer;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
 import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.LogMergePolicy;
 import org.apache.lucene.index.RandomIndexWriter;
-import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.index.SerialMergeScheduler;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.search.spans.SpanTermQuery;
-import org.apache.lucene.document.Document;
-import org.apache.lucene.document.Field;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.LuceneTestCase;
 
@@ -36,6 +37,8 @@
    Directory dir = newDirectory();
    RandomIndexWriter writer = new RandomIndexWriter(random, dir,
        newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setMergeScheduler(new SerialMergeScheduler()));
+    // asserts below require no unexpected merges:
+    ((LogMergePolicy) writer.w.getMergePolicy()).setMergeFactor(10);
 
    // NOTE: cannot use writer.getReader because RIW (on
    // flipping a coin) may give us a newly opened reader,
Index: lucene/src/test/org/apache/lucene/search/TestCachingWrapperFilter.java
--- lucene/src/test/org/apache/lucene/search/TestCachingWrapperFilter.java	Sat Nov 13 11:49:58 2010 -0500
+++ lucene/src/test/org/apache/lucene/search/TestCachingWrapperFilter.java	Sat Nov 13 13:41:21 2010 -0500
@@ -19,14 +19,15 @@
 
 import java.io.IOException;
 
-import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.Term;
+import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
-import org.apache.lucene.analysis.MockAnalyzer;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.LogMergePolicy;
+import org.apache.lucene.index.RandomIndexWriter;
 import org.apache.lucene.index.SerialMergeScheduler;
-import org.apache.lucene.index.RandomIndexWriter;
 import org.apache.lucene.index.SlowMultiReaderWrapper;
+import org.apache.lucene.index.Term;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.OpenBitSet;
@@ -154,6 +155,8 @@
    Directory dir = newDirectory();
    RandomIndexWriter writer = new RandomIndexWriter(random, dir, newIndexWriterConfig(
        TEST_VERSION_CURRENT, new MockAnalyzer()).setMergeScheduler(new SerialMergeScheduler()));
+    // asserts below require no unexpected merges:
+    ((LogMergePolicy) writer.w.getMergePolicy()).setMergeFactor(10);
 
    // NOTE: cannot use writer.getReader because RIW (on
    // flipping a coin) may give us a newly opened reader,
Index: lucene/src/test/org/apache/lucene/store/MockDirectoryWrapper.java
--- lucene/src/test/org/apache/lucene/store/MockDirectoryWrapper.java	Sat Nov 13 11:49:58 2010 -0500
+++ lucene/src/test/org/apache/lucene/store/MockDirectoryWrapper.java	Sat Nov 13 13:41:21 2010 -0500
@@ -49,22 +49,29 @@
  boolean trackDiskUsage = false;
  private Set unSyncedFiles;
  private Set createdFiles;
+  Set openFilesForWrite = new HashSet();
  volatile boolean crashed;
 
  // use this for tracking files for crash.
  // additionally: provides debugging information in case you leave one open
-  Map files
-    = Collections.synchronizedMap(new IdentityHashMap());
-
+  Map openFileHandles = Collections.synchronizedMap(new IdentityHashMap());
+
  // NOTE: we cannot initialize the Map here due to the
  // order in which our constructor actually does this
  // member initialization vs when it calls super.  It seems
  // like super is called, then our members are initialized:
  Map openFiles;
 
+  // Only tracked if noDeleteOpenFile is true: if an attempt
+  // is made to delete an open file, we enroll it here.
+  Set openFilesDeleted;
+
  private synchronized void init() {
-    if (openFiles == null)
+    if (openFiles == null) {
      openFiles = new HashMap();
+      openFilesDeleted = new HashSet();
+    }
+
    if (createdFiles == null)
      createdFiles = new HashSet();
    if (unSyncedFiles == null)
@@ -128,11 +135,13 @@
  public synchronized void crash() throws IOException {
    crashed = true;
    openFiles = new HashMap();
+    openFilesForWrite = new HashSet();
+    openFilesDeleted = new HashSet();
    Iterator it = unSyncedFiles.iterator();
    unSyncedFiles = new HashSet();
    // first force-close all files, so we can corrupt on windows etc.
    // clone the file map, as these guys want to remove themselves on close.
-    Map m = new IdentityHashMap(files);
+    Map m = new IdentityHashMap(openFileHandles);
    for (Closeable f : m.keySet())
      try {
        f.close();
@@ -227,6 +236,21 @@
    deleteFile(name, false);
  }
 
+  // sets the cause of the incoming ioe to be the stack
+  // trace when the offending file name was opened
+  private synchronized IOException fillOpenTrace(IOException ioe, String name, boolean input) {
+    for(Map.Entry ent : openFileHandles.entrySet()) {
+      if (input && ent.getKey() instanceof MockIndexInputWrapper && ((MockIndexInputWrapper) ent.getKey()).name.equals(name)) {
+        ioe.initCause(ent.getValue());
+        break;
+      } else if (!input && ent.getKey() instanceof MockIndexOutputWrapper && ((MockIndexOutputWrapper) ent.getKey()).name.equals(name)) {
+        ioe.initCause(ent.getValue());
+        break;
+      }
+    }
+    return ioe;
+  }
+
  private synchronized void deleteFile(String name, boolean forced) throws IOException {
    maybeThrowDeterministicException();
 
@@ -236,14 +260,21 @@
    if (unSyncedFiles.contains(name))
      unSyncedFiles.remove(name);
 
-    if (!forced) {
-      if (noDeleteOpenFile && openFiles.containsKey(name)) {
-        throw new IOException("MockDirectoryWrapper: file \"" + name + "\" is still open: cannot delete");
+    if (!forced && noDeleteOpenFile) {
+      if (openFiles.containsKey(name)) {
+        openFilesDeleted.add(name);
+        throw fillOpenTrace(new IOException("MockDirectoryWrapper: file \"" + name + "\" is still open: cannot delete"), name, true);
+      } else {
+        openFilesDeleted.remove(name);
      }
    }
    delegate.deleteFile(name);
  }
 
+  public synchronized Set getOpenDeletedFiles() {
+    return new HashSet(openFilesDeleted);
+  }
+
  @Override
  public synchronized IndexOutput createOutput(String name) throws IOException {
    if (crashed)
@@ -278,7 +309,8 @@
      }
    }
    IndexOutput io = new MockIndexOutputWrapper(this, delegate.createOutput(name), name);
-    files.put(io, new RuntimeException("unclosed IndexOutput"));
+    openFileHandles.put(io, new RuntimeException("unclosed IndexOutput"));
+    openFilesForWrite.add(name);
    return io;
  }
 
@@ -292,12 +324,18 @@
        v = Integer.valueOf(v.intValue()+1);
        openFiles.put(name, v);
      } else {
-         openFiles.put(name, Integer.valueOf(1));
+        openFiles.put(name, Integer.valueOf(1));
      }
    }
 
+    // cannot open a file for input if it's still open for
+    // output, except for segments.gen and segments_N
+    if (openFilesForWrite.contains(name) && !name.startsWith("segments")) {
+      throw fillOpenTrace(new IOException("MockDirectoryWrapper: file \"" + name + "\" is still open for writing"), name, false);
+    }
+
    IndexInput ii = new MockIndexInputWrapper(this, name, delegate.openInput(name));
-    files.put(ii, new RuntimeException("unclosed IndexInput"));
+    openFileHandles.put(ii, new RuntimeException("unclosed IndexInput"));
    return ii;
  }
 
@@ -331,11 +369,12 @@
  public synchronized void close() throws IOException {
    if (openFiles == null) {
      openFiles = new HashMap();
+      openFilesDeleted = new HashSet();
    }
    if (noDeleteOpenFile && openFiles.size() > 0) {
      // print the first one as its very verbose otherwise
      Exception cause = null;
-      Iterator stacktraces = files.values().iterator();
+      Iterator stacktraces = openFileHandles.values().iterator();
      if (stacktraces.hasNext())
        cause = stacktraces.next();
      // RuntimeException instead of IOException because
Index: lucene/src/test/org/apache/lucene/store/MockIndexInputWrapper.java
--- lucene/src/test/org/apache/lucene/store/MockIndexInputWrapper.java	Sat Nov 13 11:49:58 2010 -0500
+++ lucene/src/test/org/apache/lucene/store/MockIndexInputWrapper.java	Sat Nov 13 13:41:21 2010 -0500
@@ -27,7 +27,7 @@
 
 public class MockIndexInputWrapper extends IndexInput {
  private MockDirectoryWrapper dir;
-  private String name;
+  final String name;
  private IndexInput delegate;
  private boolean isClone;
 
@@ -52,12 +52,13 @@
      if (v != null) {
        if (v.intValue() == 1) {
          dir.openFiles.remove(name);
+          dir.openFilesDeleted.remove(name);
        } else {
          v = Integer.valueOf(v.intValue()-1);
          dir.openFiles.put(name, v);
        }
      }
-      dir.files.remove(this);
+      dir.openFileHandles.remove(this);
    }
  }
 }
Index: lucene/src/test/org/apache/lucene/store/MockIndexOutputWrapper.java
--- lucene/src/test/org/apache/lucene/store/MockIndexOutputWrapper.java	Sat Nov 13 11:49:58 2010 -0500
+++ lucene/src/test/org/apache/lucene/store/MockIndexOutputWrapper.java	Sat Nov 13 13:41:21 2010 -0500
@@ -30,7 +30,7 @@
  private MockDirectoryWrapper dir;
  private final IndexOutput delegate;
  private boolean first=true;
-  private final String name;
+  final String name;
 
  byte[] singleByte = new byte[1];
 
@@ -53,7 +53,10 @@
        dir.maxUsedSize = size;
      }
    }
-    dir.files.remove(this);
+    synchronized(dir) {
+      dir.openFileHandles.remove(this);
+      dir.openFilesForWrite.remove(name);
+    }
  }
 
  @Override